Theory Concl_Pres_Clarification
theory Concl_Pres_Clarification
imports Main
begin
text ‹Clarification and clarsimp that preserve the structure of
the subgoal's conclusion, i.e., neither solve it, nor swap it
with premises, as, eg, @{thm [source] notE} does.
›
ML ‹
local
open Classical
fun is_cp_brl (is_elim,thm) = let
val prems = Thm.prems_of thm
val nprems = length prems
val concl = Thm.concl_of thm
in
(if is_elim then nprems=2 else nprems=1) andalso let
val lprem_concl = hd (rev prems)
|> Logic.strip_assums_concl
in
concl aconv lprem_concl
end
end
val not_elim = @{thm notE}
val hyp_subst_tacs = [Hypsubst.hyp_subst_tac]
fun eq_contr_tac ctxt i = ematch_tac ctxt [not_elim] i THEN eq_assume_tac i;
fun eq_assume_contr_tac ctxt = eq_assume_tac ORELSE' eq_contr_tac ctxt;
fun cp_bimatch_from_nets_tac ctxt =
biresolution_from_nets_tac ctxt (order_list o filter (is_cp_brl o snd)) true;
in
fun cp_clarify_step_tac ctxt =
let val {safep_netpair, ...} = (rep_cs o claset_of) ctxt in
appSWrappers ctxt
(FIRST'
[eq_assume_contr_tac ctxt,
FIRST' (map (fn tac => tac ctxt) hyp_subst_tacs),
cp_bimatch_from_nets_tac ctxt safep_netpair
])
end;
fun cp_clarify_tac ctxt = SELECT_GOAL (REPEAT_DETERM (cp_clarify_step_tac ctxt 1));
fun cp_clarsimp_tac ctxt =
Simplifier.safe_asm_full_simp_tac ctxt THEN_ALL_NEW
cp_clarify_tac (addSss ctxt);
end
›
method_setup cp_clarify = ‹
(Classical.cla_method' (CHANGED_PROP oo cp_clarify_tac))
›
method_setup cp_clarsimp = ‹let
fun clasimp_method' tac =
Method.sections clasimp_modifiers >> K (SIMPLE_METHOD' o tac);
in
clasimp_method' (CHANGED_PROP oo cp_clarsimp_tac)
end›
end
Theory Named_Theorems_Rev
theory Named_Theorems_Rev
imports Main
keywords "named_theorems_rev" :: thy_decl
begin
ML ‹
signature NAMED_THEOREMS_REV =
sig
val member: Proof.context -> string -> thm -> bool
val get: Proof.context -> string -> thm list
val add_thm: string -> thm -> Context.generic -> Context.generic
val del_thm: string -> thm -> Context.generic -> Context.generic
val add: string -> attribute
val del: string -> attribute
val check: Proof.context -> string * Position.T -> string
val declare: binding -> string -> local_theory -> string * local_theory
end;
structure Named_Theorems_Rev: NAMED_THEOREMS_REV =
struct
structure Data = Generic_Data
(
type T = thm Item_Net.T Symtab.table;
val empty: T = Symtab.empty;
val extend = I;
val merge : T * T -> T = Symtab.join (K Item_Net.merge);
);
fun new_entry name =
Data.map (fn data =>
if Symtab.defined data name
then error ("Duplicate declaration of named theorems: " ^ quote name)
else Symtab.update (name, Thm.full_rules) data);
fun undeclared name = "Undeclared named theorems " ^ quote name;
fun the_entry context name =
(case Symtab.lookup (Data.get context) name of
NONE => error (undeclared name)
| SOME entry => entry);
fun map_entry name f context =
(the_entry context name; Data.map (Symtab.map_entry name f) context);
fun member ctxt = Item_Net.member o the_entry (Context.Proof ctxt);
fun content context = Item_Net.content o the_entry context;
val get = content o Context.Proof;
fun add_thm name = map_entry name o Item_Net.update;
fun del_thm name = map_entry name o Item_Net.remove;
val add = Thm.declaration_attribute o add_thm;
val del = Thm.declaration_attribute o del_thm;
fun check ctxt (xname, pos) =
let
val context = Context.Proof ctxt;
val fact_ref = Facts.Named ((xname, Position.none), NONE);
fun err () = error (undeclared xname ^ Position.here pos);
in
(case try (Proof_Context.get_fact_generic context) fact_ref of
SOME (SOME name, _) => if can (the_entry context) name then name else err ()
| _ => err ())
end;
fun declare binding descr lthy =
let
val name = Local_Theory.full_name lthy binding;
val description =
"declaration of " ^ (if descr = "" then Binding.name_of binding ^ " rules" else descr);
val lthy' = lthy
|> Local_Theory.background_theory (Context.theory_map (new_entry name))
|> Local_Theory.map_contexts (K (Context.proof_map (new_entry name)))
|> Local_Theory.add_thms_dynamic (binding, fn context => content context name)
|> Attrib.local_setup binding (Attrib.add_del (add name) (del name)) description
in (name, lthy') end;
val _ =
Outer_Syntax.local_theory @{command_keyword named_theorems_rev}
"declare named collection of theorems"
(Parse.and_list1 (Parse.binding -- Scan.optional Parse.text "") >>
fold (fn (b, descr) => snd o declare b descr));
val _ = Theory.setup
(ML_Antiquotation.inline @{binding named_theorems_rev}
(Args.context -- Scan.lift Args.name_position >>
(fn (ctxt, name) => ML_Syntax.print_string (check ctxt name))));
end;
›
end
Theory Pf_Add
theory Pf_Add
imports Automatic_Refinement.Misc "HOL-Library.Monad_Syntax"
begin
lemma fun_ordI:
assumes "⋀x. ord (f x) (g x)"
shows "fun_ord ord f g"
using assms unfolding fun_ord_def by auto
lemma fun_ordD:
assumes "fun_ord ord f g"
shows "ord (f x) (g x)"
using assms unfolding fun_ord_def by auto
lemma mono_fun_fun_cnv:
assumes "⋀d. monotone (fun_ord ordA) ordB (λx. F x d)"
shows "monotone (fun_ord ordA) (fun_ord ordB) F"
apply rule
apply (rule fun_ordI)
using assms
by (blast dest: monotoneD)
lemma fun_lub_Sup[simp]: "fun_lub Sup = Sup"
unfolding fun_lub_def[abs_def]
by (clarsimp intro!: ext; metis image_def)
lemma fun_ord_le[simp]: "fun_ord (≤) = (≤)"
unfolding fun_ord_def[abs_def]
by (auto intro!: ext simp: le_fun_def)
end
Theory Pf_Mono_Prover
section ‹Interfacing Partial-Function's Monotonicity Prover›
theory Pf_Mono_Prover
imports Separation_Logic_Imperative_HOL.Sep_Main
begin
ML ‹
structure Pf_Mono_Prover = struct
fun mono_tac ctxt = (REPEAT o eresolve_tac ctxt @{thms thin_rl})
THEN' Partial_Function.mono_tac ctxt
end
›
method_setup pf_mono = ‹Scan.succeed (fn ctxt => SIMPLE_METHOD' (Pf_Mono_Prover.mono_tac ctxt))› ‹Monotonicity prover of the partial function package›
end
File ‹PO_Normalizer.ML›
signature PO_NORMALIZER = sig
type norm_set = {
trans_rules : thm list,
cong_rules : thm list,
norm_rules : thm list,
refl_rules : thm list
}
val gen_norm_tac : norm_set -> Proof.context -> tactic'
val gen_norm_rule : thm list -> norm_set -> Proof.context -> thm -> thm
end
structure PO_Normalizer : PO_NORMALIZER = struct
type norm_set = {
trans_rules : thm list,
cong_rules : thm list,
norm_rules : thm list,
refl_rules : thm list
}
val cfg_trace =
Attrib.setup_config_bool @{binding "norm_rel_trace"} (K false)
val cfg_depth_limit =
Attrib.setup_config_int @{binding "norm_rel_depth_limit"} (K ~1)
fun gen_norm_tac {trans_rules, cong_rules, norm_rules, refl_rules} ctxt = let
val do_trace = Config.get ctxt cfg_trace
fun trace_tac str _ st = if do_trace then
(tracing str; Seq.single st)
else Seq.single st
val print_tac = if do_trace then print_tac else (K (K all_tac))
val depth_limit = Config.get ctxt cfg_depth_limit
fun norm_tac d ctxt i st = let
val transr_tac = resolve_tac ctxt trans_rules
val congr_tac = resolve_tac ctxt cong_rules
val rewrr_tac = resolve_tac ctxt norm_rules
val solver_tac = resolve_tac ctxt refl_rules
val cong_tac = (transr_tac THEN' (
(congr_tac THEN' trace_tac "cong") THEN_ALL_NEW_FWD norm_tac (d+1) ctxt))
val rewr_tac = (transr_tac THEN' (SOLVED' rewrr_tac)
THEN' trace_tac "rewr" THEN' transr_tac THEN' norm_tac (d+1) ctxt)
val solve_tac = SOLVED' solver_tac THEN' (K (print_tac ctxt "solved"))
in
if depth_limit>=0 andalso d>depth_limit then
(K (print_tac ctxt "Norm-Depth limit reached"))
THEN' solve_tac
else
(K (print_tac ctxt ("Normalizing ("^ string_of_int d ^")"))) THEN'
(TRY o cong_tac)
THEN' (TRY o rewr_tac)
THEN' solve_tac
end i st
in norm_tac 1 ctxt end
fun gen_norm_rule init_thms norm_set ctxt thm = let
val orig_ctxt = ctxt
val ((_,[thm]),ctxt) = Variable.import false [thm] ctxt
fun tac ctxt =
eresolve_tac ctxt init_thms
THEN' gen_norm_tac norm_set ctxt
val concl = Thm.concl_of thm
val x = Var (("x",0),@{typ prop})
val t = @{mk_term "PROP ?concl ⟹ PROP ?x"}
val thm2 = Goal.prove ctxt [] [] t
(fn {context = ctxt, ...} => tac ctxt 1)
val thm = thm RS thm2
val [thm] = Variable.export ctxt orig_ctxt [thm]
in
thm
end
end
Theory Sepref_Misc
theory Sepref_Misc
imports
Refine_Monadic.Refine_Monadic
PO_Normalizer
"List-Index.List_Index"
Separation_Logic_Imperative_HOL.Sep_Main
Named_Theorems_Rev
"HOL-Eisbach.Eisbach"
Separation_Logic_Imperative_HOL.Array_Blit
begin
hide_const (open) CONSTRAINT
lemma index_of_last_distinct[simp]:
"distinct l ⟹ index l (last l) = length l - 1"
apply (cases l rule: rev_cases)
apply (auto simp: index_append)
done
lemma index_eqlen_conv[simp]: "index l x = length l ⟷ x∉set l"
by (auto simp: index_size_conv)
subsection ‹Iterated Curry and Uncurry›
text ‹Uncurry0›
definition "uncurry0 c ≡ λ_::unit. c"
definition curry0 :: "(unit ⇒ 'a) ⇒ 'a" where "curry0 f = f ()"
lemma uncurry0_apply[simp]: "uncurry0 c x = c" by (simp add: uncurry0_def)
lemma curry_uncurry0_id[simp]: "curry0 (uncurry0 f) = f" by (simp add: curry0_def)
lemma uncurry_curry0_id[simp]: "uncurry0 (curry0 g) = g" by (auto simp: curry0_def)
lemma param_uncurry0[param]: "(uncurry0,uncurry0) ∈ A → (unit_rel→A)" by auto
text ‹Abbreviations for higher-order uncurries›
abbreviation "uncurry2 f ≡ uncurry (uncurry f)"
abbreviation "curry2 f ≡ curry (curry f)"
abbreviation "uncurry3 f ≡ uncurry (uncurry2 f)"
abbreviation "curry3 f ≡ curry (curry2 f)"
abbreviation "uncurry4 f ≡ uncurry (uncurry3 f)"
abbreviation "curry4 f ≡ curry (curry3 f)"
abbreviation "uncurry5 f ≡ uncurry (uncurry4 f)"
abbreviation "curry5 f ≡ curry (curry4 f)"
abbreviation "uncurry6 f ≡ uncurry (uncurry5 f)"
abbreviation "curry6 f ≡ curry (curry5 f)"
abbreviation "uncurry7 f ≡ uncurry (uncurry6 f)"
abbreviation "curry7 f ≡ curry (curry6 f)"
abbreviation "uncurry8 f ≡ uncurry (uncurry7 f)"
abbreviation "curry8 f ≡ curry (curry7 f)"
abbreviation "uncurry9 f ≡ uncurry (uncurry8 f)"
abbreviation "curry9 f ≡ curry (curry8 f)"
lemma fold_partial_uncurry: "uncurry (λ(ps, cf). f ps cf) = uncurry2 f" by auto
lemma curry_shl:
"⋀g f. (g ≡ curry f) ≡ (uncurry g ≡ f)"
"⋀g f. (g ≡ curry0 f) ≡ (uncurry0 g ≡ f)"
by (atomize (full); auto)+
lemma curry_shr:
"⋀f g. (curry f ≡ g) ≡ (f ≡ uncurry g)"
"⋀f g. (curry0 f ≡ g) ≡ (f ≡ uncurry0 g)"
by (atomize (full); auto)+
lemmas uncurry_shl = curry_shr[symmetric]
lemmas uncurry_shr = curry_shl[symmetric]
end
Theory Structured_Apply
section ‹Subgoal Structure for Apply Scripts›
theory Structured_Apply
imports Main
keywords
"focus" "solved" "applyS" "apply1" "applyF" "applyT" :: prf_script
begin
text ‹This theory provides some variants of the apply command
that make the proof structure explicit. See below for examples.
Compared to the @{command subgoal}-command, these set of commands is more lightweight,
and fully supports schematic variables.
›
ML ‹
signature STRUCTURED_APPLY = sig
val focus: Proof.state -> Proof.state
val solved: Proof.state -> Proof.state
val unfocus: Proof.state -> Proof.state
val apply1: Method.text_range -> Proof.state -> Proof.state Seq.result Seq.seq
val applyT: Method.text * Position.range -> Proof.state -> Proof.state Seq.result Seq.seq
val apply_focus: Method.text_range -> Proof.state -> Proof.state Seq.result Seq.seq
val apply_solve: Method.text_range -> Proof.state -> Proof.state Seq.result Seq.seq
end
structure Structured_Apply: STRUCTURED_APPLY = struct
val focus = Proof.refine_primitive (K (Goal.restrict 1 1))
val unfocus = Proof.refine_primitive (K (Goal.unrestrict 1))
val solved = Proof.refine_primitive (fn _ => fn thm => let
val _ = if Thm.nprems_of thm > 0 then error "Subgoal not solved" else ()
in
Goal.unrestrict 1 thm
end
)
fun apply_focus m = focus #> Proof.apply m
fun assert_num_solved d msg m s = let
val n_subgoals = Proof.raw_goal #> #goal #> Thm.nprems_of
val n1 = n_subgoals s
fun do_assert s = if n1 - n_subgoals s <> d then error msg else s
in
s
|> Proof.apply m
|> Seq.map_result do_assert
end
fun apply_solve m =
focus
#> assert_num_solved 1 "Subgoal not solved" m
#> Seq.map_result unfocus
fun apply1 m =
focus
#> assert_num_solved 0 "Method must not produce or solve subgoals" m
#> Seq.map_result unfocus
fun applyT (m,pos) = let
open Method
val m = Combinator (no_combinator_info, Select_Goals 1, [m])
in
Proof.apply (m,pos)
end
end
val _ =
Outer_Syntax.command @{command_keyword solved} "Primitive unfocus after subgoal is solved"
(Scan.succeed ( Toplevel.proof (Structured_Apply.solved) ));
val _ =
Outer_Syntax.command @{command_keyword focus} "Primitive focus then optionally apply method"
(Scan.option Method.parse >> (fn
NONE => Toplevel.proof (Structured_Apply.focus)
| SOME m => (Method.report m; Toplevel.proofs (Structured_Apply.apply_focus m))
));
val _ =
Outer_Syntax.command @{command_keyword applyF} "Primitive focus then apply method"
(Method.parse >> (fn m => (Method.report m;
Toplevel.proofs (Structured_Apply.apply_focus m)
)));
val _ =
Outer_Syntax.command @{command_keyword applyS} "Apply method that solves exactly one subgoal"
(Method.parse >> (fn m => (Method.report m;
Toplevel.proofs (Structured_Apply.apply_solve m)
)));
val _ =
Outer_Syntax.command @{command_keyword apply1} "Apply method that does not change number of subgoals"
(Method.parse >> (fn m => (Method.report m;
Toplevel.proofs (Structured_Apply.apply1 m)
)));
val _ =
Outer_Syntax.command @{command_keyword applyT} "Apply method on first subgoal"
(Method.parse >> (fn m => (Method.report m;
Toplevel.proofs (Structured_Apply.applyT m)
)));
›
end
Theory Term_Synth
section ‹Rule-Based Synthesis of Terms›
theory Term_Synth
imports Sepref_Misc
begin
definition SYNTH_TERM :: "'a::{} ⇒ 'b::{} ⇒ bool"
where [simp]: "SYNTH_TERM x y ≡ True"
consts SDUMMY :: "'a :: {}"
named_theorems_rev synth_rules ‹Term synthesis rules›
text ‹Term synthesis works by proving @{term "SYNTH_TERM t v"}, by repeatedly applying the
first matching intro-rule from ‹synth_rules›. ›
ML ‹
signature TERM_SYNTH = sig
val synth_term: thm list -> Proof.context -> term -> term
end
structure Term_Synth : TERM_SYNTH = struct
fun replace_sdummies t = let
fun r (t1$t2) n = let
val (t1,n) = r t1 n
val (t2,n) = r t2 n
in (t1$t2,n) end
| r (Abs (x,T,t)) n = let
val (t,n) = r t n
in (Abs (x,T,t),n) end
| r @{mpat (typs) "SDUMMY::?'v_T"} n = (Var (("_dummy",n),T),n+1)
| r (t' as (Var ((name,_),_))) n = if String.isPrefix "_" name then raise TERM ("replace_sdummies: Term already contains dummy patterns",[t',t]) else (t',n)
| r t n = (t,n)
in
fst (r t 0)
end
fun synth_term thms ctxt t = let
val orig_ctxt = ctxt
val (t,ctxt) = yield_singleton (Variable.import_terms true) t ctxt
val v = Var (("result",0),TVar (("T",0),[]))
val goal = @{mk_term "Trueprop (SYNTH_TERM ?t ?v)"} |> Thm.cterm_of ctxt
val rules = thms @ Named_Theorems_Rev.get ctxt @{named_theorems_rev synth_rules}
|> Tactic.build_net
fun tac ctxt = ALLGOALS (TRY_SOLVED' (
REPEAT_DETERM' (CHANGED o resolve_from_net_tac ctxt rules)))
val thm = Goal.prove_internal ctxt [] goal (fn _ => tac ctxt)
val res = case Thm.concl_of thm of
@{mpat "Trueprop (SYNTH_TERM _ ?res)"} => res
| _ => raise THM("Synth_Term: Proved a different theorem?",~1,[thm])
val res = singleton (Variable.export_terms ctxt orig_ctxt) res
|> replace_sdummies
in
res
end
end
›
end
Theory User_Smashing
theory User_Smashing
imports Pure
begin
ML ‹
fun enumerate xs = fold (fn x => fn (i, xs) => (i +1, (x, i) :: xs)) xs (0, []) |> snd
›
ML ‹
fun dummy_abs _ [] t = t
| dummy_abs n (T :: Ts) t = Abs ("x" ^ Int.toString n, T, dummy_abs (n + 1) Ts t)
›
ML ‹
fun common_prefix Ts (t1 as Abs (_, T, t)) (u1 as Abs (_, U, u)) =
if U = T then common_prefix (T :: Ts) t u else ([], t1, u1)
| common_prefix Ts t u = (Ts, t, u);
fun dest_app acc (t $ u) = dest_app (u :: acc) t
| dest_app acc t = (t, acc);
fun add_bound (Bound i, n) bs = (i, n) :: bs
| add_bound _ bs = bs;
›
ML ‹
fun smash_pair ctxt thm (t, u) =
let
val idx = Thm.maxidx_of thm + 1;
val ctxt' = ctxt;
val (Ts, t1, _) = common_prefix [] t u;
val (tas, t2) = Term.strip_abs t;
val (uas, u2) = Term.strip_abs u;
val (tx as Var (_, T1), ts) = Term.strip_comb t2;
val (ux as Var (_, U1), us) = Term.strip_comb u2;
val Ts1 = Term.binder_types T1;
val Us1 = Term.binder_types U1;
val T = Term.fastype_of1 (Ts, t1);
val tshift = length tas - length Ts;
val ushift = length uas - length Ts;
val tbs = fold add_bound (enumerate (rev ts)) [] |> map (apfst (fn i => i - tshift));
val ubs = fold add_bound (enumerate (rev us)) [] |> map (apfst (fn i => i - ushift));
val bounds = inter (op =) (map fst tbs) (map fst ubs) |> distinct (=);
val T' = map (nth Ts) bounds ---> T;
val v = Var (("simon", idx), T');
val tbs' = map (fn i => find_first (fn (j, _) => i = j) tbs |> the |> snd |> Bound) bounds;
val t' = list_comb (v, tbs') |> dummy_abs 0 Ts1;
val ubs' = map (fn i => find_first (fn (j, _) => i = j) ubs |> the |> snd |> Bound) bounds;
val u' = list_comb (v, ubs') |> dummy_abs 0 Us1;
val subst = [(Term.dest_Var tx, Thm.cterm_of ctxt' t'), (Term.dest_Var ux, Thm.cterm_of ctxt' u')];
in
instantiate_normalize ([], subst) thm
end;
fun smash ctxt thm =
case (Thm.tpairs_of thm) of
[] => thm
| (p :: _) => smash_pair ctxt thm p;
fun smashed_attrib ctxt thm =
(NONE, SOME (smash ctxt thm));
›
ML ‹
val smash_new_rule = Seq.single oo smash;
›
end
Theory Sepref_Id_Op
section ‹Operation Identification Phase›
theory Sepref_Id_Op
imports
Main
Automatic_Refinement.Refine_Lib
Automatic_Refinement.Autoref_Tagging
"Lib/Named_Theorems_Rev"
begin
text ‹
The operation identification phase is adapted from the Autoref tool.
The basic idea is to have a type system, which works on so called
interface types (also called conceptual types). Each conceptual type
denotes an abstract data type, e.g., set, map, priority queue.
Each abstract operation, which must be a constant applied to its arguments,
is assigned a conceptual type. Additionally, there is a set of
{\emph pattern rewrite rules},
which are applied to subterms before type inference takes place, and
which may be backtracked over.
This way, encodings of abstract operations in Isabelle/HOL, like
@{term [source] "λ_. None"} for the empty map,
or @{term [source] "fun_upd m k (Some v)"} for map update, can be rewritten
to abstract operations, and get properly typed.
›
subsection "Proper Protection of Term"
text ‹ The following constants are meant to encode abstraction and
application as proper HOL-constants, and thus avoid strange effects with
HOL's higher-order unification heuristics and automatic
beta and eta-contraction.
The first step of operation identification is to protect the term
by replacing all function applications and abstractions be
the constants defined below.
›
definition [simp]: "PROTECT2 x (y::prop) ≡ x"
consts DUMMY :: "prop"
abbreviation PROTECT2_syn ("'(#_#')") where "PROTECT2_syn t ≡ PROTECT2 t DUMMY"
abbreviation (input)ABS2 :: "('a⇒'b)⇒'a⇒'b" (binder "λ⇩2" 10)
where "ABS2 f ≡ (λx. PROTECT2 (f x) DUMMY)"
lemma beta: "(λ⇩2x. f x)$x ≡ f x" by simp
text ‹
Another version of @{const "APP"}. Treated like @{const APP} by our tool.
Required to avoid infinite pattern rewriting in some cases, e.g., map-lookup.
›
definition APP' (infixl "$''" 900) where [simp, autoref_tag_defs]: "f$'a ≡ f a"
text ‹
Sometimes, whole terms should be protected from being processed by our tool.
For example, our tool should not look into numerals. For this reason,
the ‹PR_CONST› tag indicates terms that our tool shall handle as
atomic constants, an never look into them.
The special form ‹UNPROTECT› can be used inside pattern rewrite rules.
It has the effect to revert the protection from its argument, and then wrap
it into a ‹PR_CONST›.
›
definition [simp, autoref_tag_defs]: "PR_CONST x ≡ x"
definition [simp, autoref_tag_defs]: "UNPROTECT x ≡ x"
subsection ‹Operation Identification›
text ‹ Indicator predicate for conceptual typing of a constant ›
definition intf_type :: "'a ⇒ 'b itself ⇒ bool" (infix "::⇩i" 10) where
[simp]: "c::⇩iI ≡ True"
lemma itypeI: "c::⇩iI" by simp
lemma itypeI': "intf_type c TYPE('T)" by (rule itypeI)
lemma itype_self: "(c::'a) ::⇩i TYPE('a)" by simp
definition CTYPE_ANNOT :: "'b ⇒ 'a itself ⇒ 'b" (infix ":::⇩i" 10) where
[simp]: "c:::⇩iI ≡ c"
text ‹ Wrapper predicate for an conceptual type inference ›
definition ID :: "'a ⇒ 'a ⇒ 'c itself ⇒ bool"
where [simp]: "ID t t' T ≡ t=t'"
subsubsection ‹Conceptual Typing Rules›
lemma ID_unfold_vars: "ID x y T ⟹ x≡y" by simp
lemma ID_PR_CONST_trigger: "ID (PR_CONST x) y T ⟹ ID (PR_CONST x) y T" .
lemma pat_rule:
"⟦ p≡p'; ID p' t' T ⟧ ⟹ ID p t' T" by simp
lemma app_rule:
"⟦ ID f f' TYPE('a⇒'b); ID x x' TYPE('a)⟧ ⟹ ID (f$x) (f'$x') TYPE('b)"
by simp
lemma app'_rule:
"⟦ ID f f' TYPE('a⇒'b); ID x x' TYPE('a)⟧ ⟹ ID (f$'x) (f'$x') TYPE('b)"
by simp
lemma abs_rule:
"⟦ ⋀x x'. ID x x' TYPE('a) ⟹ ID (t x) (t' x x') TYPE('b) ⟧ ⟹
ID (λ⇩2x. t x) (λ⇩2x'. t' x' x') TYPE('a⇒'b)"
by simp
lemma id_rule: "c::⇩iI ⟹ ID c c I" by simp
lemma annot_rule: "ID t t' I ⟹ ID (t:::⇩iI) t' I"
by simp
lemma fallback_rule:
"ID (c::'a) c TYPE('c)"
by simp
lemma unprotect_rl1: "ID (PR_CONST x) t T ⟹ ID (UNPROTECT x) t T"
by simp
subsection ‹ ML-Level code ›
ML ‹
infix 0 THEN_ELSE_COMB'
signature ID_OP_TACTICAL = sig
val SOLVE_FWD: tactic' -> tactic'
val DF_SOLVE_FWD: bool -> tactic' -> tactic'
end
structure Id_Op_Tactical :ID_OP_TACTICAL = struct
fun SOLVE_FWD tac i st = SOLVED' (
tac
THEN_ALL_NEW_FWD (SOLVE_FWD tac)) i st
fun DF_SOLVE_FWD dbg tac = let
val stuck_list_ref = Unsynchronized.ref []
fun stuck_tac _ st = if dbg then (
stuck_list_ref := st :: !stuck_list_ref;
Seq.empty
) else Seq.empty
fun rec_tac i st = (
(tac THEN_ALL_NEW_FWD (SOLVED' rec_tac))
ORELSE' stuck_tac
) i st
fun fail_tac _ _ = if dbg then
Seq.of_list (rev (!stuck_list_ref))
else Seq.empty
in
rec_tac ORELSE' fail_tac
end
end
›
named_theorems_rev id_rules "Operation identification rules"
named_theorems_rev pat_rules "Operation pattern rules"
named_theorems_rev def_pat_rules "Definite operation pattern rules (not backtracked over)"
ML ‹
structure Id_Op = struct
fun id_a_conv cnv ct = case Thm.term_of ct of
@{mpat "ID _ _ _"} => Conv.fun_conv (Conv.fun_conv (Conv.arg_conv cnv)) ct
| _ => raise CTERM("id_a_conv",[ct])
fun
protect env (@{mpat "?t:::⇩i?I"}) = let
val t = protect env t
in
@{mk_term env: "?t:::⇩i?I"}
end
| protect _ (t as @{mpat "PR_CONST _"}) = t
| protect env (t1$t2) = let
val t1 = protect env t1
val t2 = protect env t2
in
@{mk_term env: "?t1.0 $ ?t2.0"}
end
| protect env (Abs (x,T,t)) = let
val t = protect (T::env) t
in
@{mk_term env: "λv_x::?'v_T. PROTECT2 ?t DUMMY"}
end
| protect _ t = t
fun protect_conv ctxt = Refine_Util.f_tac_conv ctxt
(protect [])
(simp_tac
(put_simpset HOL_basic_ss ctxt addsimps @{thms PROTECT2_def APP_def}) 1)
fun unprotect_conv ctxt
= Simplifier.rewrite (put_simpset HOL_basic_ss ctxt
addsimps @{thms PROTECT2_def APP_def})
fun do_unprotect_tac ctxt =
resolve_tac ctxt @{thms unprotect_rl1} THEN'
CONVERSION (Refine_Util.HOL_concl_conv (fn ctxt => id_a_conv (unprotect_conv ctxt)) ctxt)
val cfg_id_debug =
Attrib.setup_config_bool @{binding id_debug} (K false)
val cfg_id_trace_fallback =
Attrib.setup_config_bool @{binding id_trace_fallback} (K false)
fun dest_id_rl thm = case Thm.concl_of thm of
@{mpat (typs) "Trueprop (?c::⇩iTYPE(?'v_T))"} => (c,T)
| _ => raise THM("dest_id_rl",~1,[thm])
val add_id_rule = snd oo Thm.proof_attributes [Named_Theorems_Rev.add @{named_theorems_rev id_rules}]
datatype id_tac_mode = Init | Step | Normal | Solve
fun id_tac ss ctxt = let
open Id_Op_Tactical
val certT = Thm.ctyp_of ctxt
val cert = Thm.cterm_of ctxt
val thy = Proof_Context.theory_of ctxt
val id_rules = Named_Theorems_Rev.get ctxt @{named_theorems_rev id_rules}
val pat_rules = Named_Theorems_Rev.get ctxt @{named_theorems_rev pat_rules}
val def_pat_rules = Named_Theorems_Rev.get ctxt @{named_theorems_rev def_pat_rules}
val rl_net = Tactic.build_net (
(pat_rules |> map (fn thm => thm RS @{thm pat_rule}))
@ @{thms annot_rule app_rule app'_rule abs_rule}
@ (id_rules |> map (fn thm => thm RS @{thm id_rule}))
)
val def_rl_net = Tactic.build_net (
(def_pat_rules |> map (fn thm => thm RS @{thm pat_rule}))
)
val id_pr_const_rename_tac =
resolve_tac ctxt @{thms ID_PR_CONST_trigger} THEN'
Subgoal.FOCUS (fn { context=ctxt, prems, ... } =>
let
fun is_ID @{mpat "Trueprop (ID _ _ _)"} = true | is_ID _ = false
val prems = filter (Thm.prop_of #> is_ID) prems
val eqs = map (fn thm => thm RS @{thm ID_unfold_vars}) prems
val conv = Conv.rewrs_conv eqs
val conv = fn ctxt => (Conv.top_sweep_conv (K conv) ctxt)
val conv = fn ctxt => Conv.fun2_conv (Conv.arg_conv (conv ctxt))
val conv = Refine_Util.HOL_concl_conv conv ctxt
in CONVERSION conv 1 end
) ctxt THEN'
resolve_tac ctxt @{thms id_rule} THEN'
resolve_tac ctxt id_rules
val ityping = id_rules
|> map dest_id_rl
|> filter (is_Const o #1)
|> map (apfst (#1 o dest_Const))
|> Symtab.make_list
val has_type = Symtab.defined ityping
fun mk_fallback name cT =
case try (Sign.the_const_constraint thy) name of
SOME T => try (Thm.instantiate'
[SOME (certT cT), SOME (certT T)] [SOME (cert (Const (name,cT)))])
@{thm fallback_rule}
| NONE => NONE
fun trace_fallback thm =
Config.get ctxt cfg_id_trace_fallback
andalso let
open Pretty
val p = block [str "ID_OP: Applying fallback rule: ", Thm.pretty_thm ctxt thm]
in
string_of p |> tracing;
false
end
val fallback_tac = CONVERSION Thm.eta_conversion THEN' IF_EXGOAL (fn i => fn st =>
case Logic.concl_of_goal (Thm.prop_of st) i of
@{mpat "Trueprop (ID (mpaq_STRUCT (mpaq_Const ?name ?cT)) _ _)"} => (
if not (has_type name) then
case mk_fallback name cT of
SOME thm => (trace_fallback thm; resolve_tac ctxt [thm] i st)
| NONE => Seq.empty
else Seq.empty
)
| _ => Seq.empty)
val init_tac = CONVERSION (
Refine_Util.HOL_concl_conv (fn ctxt => (id_a_conv (protect_conv ctxt)))
ctxt
)
val step_tac = (FIRST' [
assume_tac ctxt,
eresolve_tac ctxt @{thms id_rule},
resolve_from_net_tac ctxt def_rl_net,
resolve_from_net_tac ctxt rl_net,
id_pr_const_rename_tac,
do_unprotect_tac ctxt,
fallback_tac])
val solve_tac = DF_SOLVE_FWD (Config.get ctxt cfg_id_debug) step_tac
in
case ss of
Init => init_tac
| Step => step_tac
| Normal => init_tac THEN' solve_tac
| Solve => solve_tac
end
end
›
subsection ‹Default Setup›
subsubsection ‹Numerals›
lemma pat_numeral[def_pat_rules]: "numeral$x ≡ UNPROTECT (numeral$x)" by simp
lemma id_nat_const[id_rules]: "(PR_CONST (a::nat)) ::⇩i TYPE(nat)" by simp
lemma id_int_const[id_rules]: "(PR_CONST (a::int)) ::⇩i TYPE(int)" by simp
end
Theory Sepref_Basic
section ‹Basic Definitions›
theory Sepref_Basic
imports
"HOL-Eisbach.Eisbach"
Separation_Logic_Imperative_HOL.Sep_Main
Refine_Monadic.Refine_Monadic
"Lib/Sepref_Misc"
"Lib/Structured_Apply"
Sepref_Id_Op
begin
no_notation i_ANNOT (infixr ":::⇩i" 10)
no_notation CONST_INTF (infixr "::⇩i" 10)
text ‹
In this theory, we define the basic concept of refinement
from a nondeterministic program specified in the
Isabelle Refinement Framework to an imperative deterministic one
specified in Imperative/HOL.
›
subsection ‹Values on Heap›
text ‹We tag every refinement assertion with the tag ‹hn_ctxt›, to
avoid higher-order unification problems when the refinement assertion
is schematic.›
definition hn_ctxt :: "('a⇒'c⇒assn) ⇒ 'a ⇒ 'c ⇒ assn"
where
"hn_ctxt P a c ≡ P a c"
definition pure :: "('b × 'a) set ⇒ 'a ⇒ 'b ⇒ assn"
where "pure R ≡ (λa c. ↑((c,a)∈R))"
lemma pure_app_eq: "pure R a c = ↑((c,a)∈R)" by (auto simp: pure_def)
lemma pure_eq_conv[simp]: "pure R = pure R' ⟷ R=R'"
unfolding pure_def
apply (rule iffI)
apply safe
apply (meson pure_assn_eq_conv)
apply (meson pure_assn_eq_conv)
done
lemma pure_rel_eq_false_iff: "pure R x y = false ⟷ (y,x)∉R"
by (auto simp: pure_def)
definition "is_pure P ≡ ∃P'. ∀x x'. P x x'=↑(P' x x')"
lemma is_pureI[intro?]:
assumes "⋀x x'. P x x' = ↑(P' x x')"
shows "is_pure P"
using assms unfolding is_pure_def by blast
lemma is_pureE:
assumes "is_pure P"
obtains P' where "⋀x x'. P x x' = ↑(P' x x')"
using assms unfolding is_pure_def by blast
lemma pure_pure[simp]: "is_pure (pure P)"
unfolding pure_def by rule blast
lemma pure_hn_ctxt[intro!]: "is_pure P ⟹ is_pure (hn_ctxt P)"
unfolding hn_ctxt_def[abs_def] .
definition "the_pure P ≡ THE P'. ∀x x'. P x x'=↑((x',x)∈P')"
lemma the_pure_pure[simp]: "the_pure (pure R) = R"
unfolding pure_def the_pure_def
by (rule theI2[where a=R]) auto
lemma is_pure_alt_def: "is_pure R ⟷ (∃Ri. ∀x y. R x y = ↑((y,x)∈Ri))"
unfolding is_pure_def
apply auto
apply (rename_tac P')
apply (rule_tac x="{(x,y). P' y x}" in exI)
apply auto
done
lemma pure_the_pure[simp]: "is_pure R ⟹ pure (the_pure R) = R"
unfolding is_pure_alt_def pure_def the_pure_def
apply (intro ext)
apply clarsimp
apply (rename_tac a c Ri)
apply (rule_tac a=Ri in theI2)
apply auto
done
lemma is_pure_conv: "is_pure R ⟷ (∃R'. R = pure R')"
unfolding pure_def is_pure_alt_def by force
lemma is_pure_the_pure_id_eq[simp]: "is_pure R ⟹ the_pure R = Id ⟷ R=pure Id"
by (auto simp: is_pure_conv)
lemma is_pure_iff_pure_assn: "is_pure P = (∀x x'. is_pure_assn (P x x'))"
unfolding is_pure_def is_pure_assn_def by metis
abbreviation "hn_val R ≡ hn_ctxt (pure R)"
lemma hn_val_unfold: "hn_val R a b = ↑((b,a)∈R)"
by (simp add: hn_ctxt_def pure_def)
definition "invalid_assn R x y ≡ ↑(∃h. h⊨R x y) * true"
abbreviation "hn_invalid R ≡ hn_ctxt (invalid_assn R)"
lemma invalidate_clone: "R x y ⟹⇩A invalid_assn R x y * R x y"
apply (rule entailsI)
unfolding invalid_assn_def
apply (auto simp: models_in_range mod_star_trueI)
done
lemma invalidate_clone': "R x y ⟹⇩A invalid_assn R x y * R x y * true"
apply (rule entailsI)
unfolding invalid_assn_def
apply (auto simp: models_in_range mod_star_trueI)
done
lemma invalidate: "R x y ⟹⇩A invalid_assn R x y"
apply (rule entailsI)
unfolding invalid_assn_def
apply (auto simp: models_in_range mod_star_trueI)
done
lemma invalid_pure_recover: "invalid_assn (pure R) x y = pure R x y * true"
apply (rule ent_iffI)
subgoal
apply (rule entailsI)
unfolding invalid_assn_def
by (auto simp: pure_def)
subgoal
unfolding invalid_assn_def
by (auto simp: pure_def)
done
lemma hn_invalidI: "h⊨hn_ctxt P x y ⟹ hn_invalid P x y = true"
apply (cases h)
apply (rule ent_iffI)
apply (auto simp: invalid_assn_def hn_ctxt_def)
done
lemma invalid_assn_cong[cong]:
assumes "x≡x'"
assumes "y≡y'"
assumes "R x' y' ≡ R' x' y'"
shows "invalid_assn R x y = invalid_assn R' x' y'"
using assms unfolding invalid_assn_def
by simp
subsection ‹Constraints in Refinement Relations›
lemma mod_pure_conv[simp]: "(h,as)⊨pure R a b ⟷ (as={} ∧ (b,a)∈R)"
by (auto simp: pure_def)
definition rdomp :: "('a ⇒ 'c ⇒ assn) ⇒ 'a ⇒ bool" where
"rdomp R a ≡ ∃h c. h ⊨ R a c"
abbreviation "rdom R ≡ Collect (rdomp R)"
lemma rdomp_ctxt[simp]: "rdomp (hn_ctxt R) = rdomp R"
by (simp add: hn_ctxt_def[abs_def])
lemma rdomp_pure[simp]: "rdomp (pure R) a ⟷ a∈Range R"
unfolding rdomp_def pure_def by auto
lemma rdom_pure[simp]: "rdom (pure R) = Range R"
unfolding rdomp_def[abs_def] pure_def by auto
lemma Range_of_constraint_conv[simp]: "Range (A∩UNIV×C) = Range A ∩ C"
by auto
subsection ‹Heap-Nres Refinement Calculus›
text ‹Predicate that expresses refinement. Given a heap
‹Γ›, program ‹c› produces a heap ‹Γ'› and
a concrete result that is related with predicate ‹R› to some
abstract result from ‹m››
definition "hn_refine Γ c Γ' R m ≡ nofail m ⟶
<Γ> c <λr. Γ' * (∃⇩Ax. R x r * ↑(RETURN x ≤ m)) >⇩t"
simproc_setup assn_simproc_hnr ("hn_refine Γ c Γ'")
= ‹K Seplogic_Auto.assn_simproc_fun›
lemma hn_refineI[intro?]:
assumes "nofail m
⟹ <Γ> c <λr. Γ' * (∃⇩Ax. R x r * ↑(RETURN x ≤ m)) >⇩t"
shows "hn_refine Γ c Γ' R m"
using assms unfolding hn_refine_def by blast
lemma hn_refineD:
assumes "hn_refine Γ c Γ' R m"
assumes "nofail m"
shows "<Γ> c <λr. Γ' * (∃⇩Ax. R x r * ↑(RETURN x ≤ m)) >⇩t"
using assms unfolding hn_refine_def by blast
lemma hn_refine_preI:
assumes "⋀h. h⊨Γ ⟹ hn_refine Γ c Γ' R a"
shows "hn_refine Γ c Γ' R a"
using assms unfolding hn_refine_def
by (auto intro: hoare_triple_preI)
lemma hn_refine_nofailI:
assumes "nofail a ⟹ hn_refine Γ c Γ' R a"
shows "hn_refine Γ c Γ' R a"
using assms by (auto simp: hn_refine_def)
lemma hn_refine_false[simp]: "hn_refine false c Γ' R m"
by rule auto
lemma hn_refine_fail[simp]: "hn_refine Γ c Γ' R FAIL"
by rule auto
lemma hn_refine_frame:
assumes "hn_refine P' c Q' R m"
assumes "P ⟹⇩t F * P'"
shows "hn_refine P c (F * Q') R m"
using assms
unfolding hn_refine_def entailst_def
apply clarsimp
apply (erule cons_pre_rule)
apply (rule cons_post_rule)
apply (erule fi_rule, frame_inference)
apply (simp only: star_aci)
apply simp
done
lemma hn_refine_cons:
assumes I: "P⟹⇩tP'"
assumes R: "hn_refine P' c Q R m"
assumes I': "Q⟹⇩tQ'"
assumes R': "⋀x y. R x y ⟹⇩t R' x y"
shows "hn_refine P c Q' R' m"
using R unfolding hn_refine_def
apply clarify
apply (rule cons_pre_rulet[OF I])
apply (rule cons_post_rulet)
apply assumption
apply (sep_auto simp: entailst_def)
apply (rule enttD)
apply (intro entt_star_mono I' R')
done
lemma hn_refine_cons_pre:
assumes I: "P⟹⇩tP'"
assumes R: "hn_refine P' c Q R m"
shows "hn_refine P c Q R m"
by (rule hn_refine_cons[OF I R]) sep_auto+
lemma hn_refine_cons_post:
assumes R: "hn_refine P c Q R m"
assumes I: "Q⟹⇩tQ'"
shows "hn_refine P c Q' R m"
using assms
by (rule hn_refine_cons[OF entt_refl _ _ entt_refl])
lemma hn_refine_cons_res:
"⟦ hn_refine Γ f Γ' R g; ⋀a c. R a c ⟹⇩t R' a c ⟧ ⟹ hn_refine Γ f Γ' R' g"
by (erule hn_refine_cons[OF entt_refl]) sep_auto+
lemma hn_refine_ref:
assumes LE: "m≤m'"
assumes R: "hn_refine P c Q R m"
shows "hn_refine P c Q R m'"
apply rule
apply (rule cons_post_rule)
apply (rule hn_refineD[OF R])
using LE apply (simp add: pw_le_iff)
apply (sep_auto intro: order_trans[OF _ LE])
done
lemma hn_refine_cons_complete:
assumes I: "P⟹⇩tP'"
assumes R: "hn_refine P' c Q R m"
assumes I': "Q⟹⇩tQ'"
assumes R': "⋀x y. R x y ⟹⇩t R' x y"
assumes LE: "m≤m'"
shows "hn_refine P c Q' R' m'"
apply (rule hn_refine_ref[OF LE])
apply (rule hn_refine_cons[OF I R I' R'])
done
lemma hn_refine_augment_res:
assumes A: "hn_refine Γ f Γ' R g"
assumes B: "g ≤⇩n SPEC Φ"
shows "hn_refine Γ f Γ' (λa c. R a c * ↑(Φ a)) g"
apply (rule hn_refineI)
apply (rule cons_post_rule)
apply (erule A[THEN hn_refineD])
using B
apply (sep_auto simp: pw_le_iff pw_leof_iff)
done
subsection ‹Product Types›
text ‹Some notion for product types is already defined here, as it is used
for currying and uncurrying, which is fundamental for the sepref tool›
definition prod_assn :: "('a1⇒'c1⇒assn) ⇒ ('a2⇒'c2⇒assn)
⇒ 'a1*'a2 ⇒ 'c1*'c2 ⇒ assn" where
"prod_assn P1 P2 a c ≡ case (a,c) of ((a1,a2),(c1,c2)) ⇒
P1 a1 c1 * P2 a2 c2"
notation prod_assn (infixr "×⇩a" 70)
lemma prod_assn_pure_conv[simp]: "prod_assn (pure R1) (pure R2) = pure (R1 ×⇩r R2)"
by (auto simp: pure_def prod_assn_def intro!: ext)
lemma prod_assn_pair_conv[simp]:
"prod_assn A B (a1,b1) (a2,b2) = A a1 a2 * B b1 b2"
unfolding prod_assn_def by auto
lemma prod_assn_true[simp]: "prod_assn (λ_ _. true) (λ_ _. true) = (λ_ _. true)"
by (auto intro!: ext simp: hn_ctxt_def prod_assn_def)
subsection "Convenience Lemmas"
lemma hn_refine_guessI:
assumes "hn_refine P f P' R f'"
assumes "f=f_conc"
shows "hn_refine P f_conc P' R f'"
using assms by simp
lemma imp_correctI:
assumes R: "hn_refine Γ c Γ' R a"
assumes C: "a ≤ SPEC Φ"
shows "<Γ> c <λr'. ∃⇩Ar. Γ' * R r r' * ↑(Φ r)>⇩t"
apply (rule cons_post_rule)
apply (rule hn_refineD[OF R])
apply (rule le_RES_nofailI[OF C])
apply (sep_auto dest: order_trans[OF _ C])
done
lemma hnr_pre_ex_conv:
shows "hn_refine (∃⇩Ax. Γ x) c Γ' R a ⟷ (∀x. hn_refine (Γ x) c Γ' R a)"
unfolding hn_refine_def
apply safe
apply (erule cons_pre_rule[rotated])
apply (rule ent_ex_postI)
apply (rule ent_refl)
apply sep_auto
done
lemma hnr_pre_pure_conv:
shows "hn_refine (Γ * ↑P) c Γ' R a ⟷ (P ⟶ hn_refine Γ c Γ' R a)"
unfolding hn_refine_def
by auto
lemma hn_refine_split_post:
assumes "hn_refine Γ c Γ' R a"
shows "hn_refine Γ c (Γ' ∨⇩A Γ'') R a"
apply (rule hn_refine_cons_post[OF assms])
by (rule entt_disjI1_direct)
lemma hn_refine_post_other:
assumes "hn_refine Γ c Γ'' R a"
shows "hn_refine Γ c (Γ' ∨⇩A Γ'') R a"
apply (rule hn_refine_cons_post[OF assms])
by (rule entt_disjI2_direct)
subsubsection ‹Return›
lemma hnr_RETURN_pass:
"hn_refine (hn_ctxt R x p) (return p) (hn_invalid R x p) R (RETURN x)"
apply rule
apply (sep_auto simp: hn_ctxt_def eintros: invalidate_clone')
done
lemma hnr_RETURN_pure:
assumes "(c,a)∈R"
shows "hn_refine emp (return c) emp (pure R) (RETURN a)"
unfolding hn_refine_def using assms
by (sep_auto simp: pure_def)
subsubsection ‹Assertion›
lemma hnr_FAIL[simp, intro!]: "hn_refine Γ c Γ' R FAIL"
unfolding hn_refine_def
by simp
lemma hnr_ASSERT:
assumes "Φ ⟹ hn_refine Γ c Γ' R c'"
shows "hn_refine Γ c Γ' R (do { ASSERT Φ; c'})"
using assms
apply (cases Φ)
by auto
subsubsection ‹Bind›
lemma bind_det_aux: "⟦ RETURN x ≤ m; RETURN y ≤ f x ⟧ ⟹ RETURN y ≤ m ⤜ f"
apply (rule order_trans[rotated])
apply (rule Refine_Basic.bind_mono)
apply assumption
apply (rule order_refl)
apply simp
done
lemma hnr_bind:
assumes D1: "hn_refine Γ m' Γ1 Rh m"
assumes D2:
"⋀x x'. RETURN x ≤ m ⟹ hn_refine (Γ1 * hn_ctxt Rh x x') (f' x') (Γ2 x x') R (f x)"
assumes IMP: "⋀x x'. Γ2 x x' ⟹⇩t Γ' * hn_ctxt Rx x x'"
shows "hn_refine Γ (m'⤜f') Γ' R (m⤜f)"
using assms
unfolding hn_refine_def
apply (clarsimp simp add: pw_bind_nofail)
apply (rule Hoare_Triple.bind_rule)
apply assumption
apply (clarsimp intro!: normalize_rules simp: hn_ctxt_def)
proof -
fix x' x
assume 1: "RETURN x ≤ m"
and "nofail m" "∀x. inres m x ⟶ nofail (f x)"
hence "nofail (f x)" by (auto simp: pw_le_iff)
moreover assume "⋀x x'. RETURN x ≤ m ⟹
nofail (f x) ⟶ <Γ1 * Rh x x'> f' x'
<λr'. ∃⇩Ar. Γ2 x x' * R r r' * true * ↑ (RETURN r ≤ f x)>"
ultimately have "⋀x'. <Γ1 * Rh x x'> f' x'
<λr'. ∃⇩Ar. Γ2 x x' * R r r' * true * ↑ (RETURN r ≤ f x)>"
using 1 by simp
also have "⋀r'. ∃⇩Ar. Γ2 x x' * R r r' * true * ↑ (RETURN r ≤ f x) ⟹⇩A
∃⇩Ar. Γ' * R r r' * true * ↑ (RETURN r ≤ f x)"
apply (sep_auto)
apply (rule ent_frame_fwd[OF IMP[THEN enttD]])
apply frame_inference
apply (solve_entails)
done
finally (cons_post_rule) have
R: "<Γ1 * Rh x x'> f' x'
<λr'. ∃⇩Ar. Γ' * R r r' * true * ↑(RETURN r ≤ f x)>"
.
show "<Γ1 * Rh x x' * true> f' x'
<λr'. ∃⇩Ar. Γ' * R r r' * true * ↑ (RETURN r ≤ m ⤜ f)>"
by (sep_auto heap: R intro: bind_det_aux[OF 1])
qed
subsubsection ‹Recursion›
definition "hn_rel P m ≡ λr. ∃⇩Ax. P x r * ↑(RETURN x ≤ m)"
lemma hn_refine_alt: "hn_refine Fpre c Fpost P m ≡ nofail m ⟶
<Fpre> c <λr. hn_rel P m r * Fpost>⇩t"
apply (rule eq_reflection)
unfolding hn_refine_def hn_rel_def
apply (simp add: hn_ctxt_def)
apply (simp only: star_aci)
done
lemma wit_swap_forall:
assumes W: "<P> c <λ_. true>"
assumes T: "(∀x. A x ⟶ <P> c <Q x>)"
shows "<P> c <λr. ¬⇩A (∃⇩Ax. ↑(A x) * ¬⇩A Q x r)>"
unfolding hoare_triple_def Let_def
apply (intro conjI impI allI)
subgoal by (elim conjE) (rule hoare_tripleD[OF W], assumption+) []
subgoal
apply (clarsimp, intro conjI allI)
apply1 (rule models_in_range)
applyS (rule hoare_tripleD[OF W]; assumption; fail)
apply1 (simp only: disj_not2, intro impI)
apply1 (drule spec[OF T, THEN mp])
apply1 (drule (2) hoare_tripleD(2))
by assumption
subgoal by (elim conjE) (rule hoare_tripleD[OF W], assumption+)
subgoal by (elim conjE) (rule hoare_tripleD[OF W], assumption+)
done
lemma hn_admissible:
assumes PREC: "precise Ry"
assumes E: "∀f∈A. nofail (f x) ⟶ <P> c <λr. hn_rel Ry (f x) r * F>"
assumes NF: "nofail (INF f∈A. f x)"
shows "<P> c <λr. hn_rel Ry (INF f∈A. f x) r * F>"
proof -
from NF obtain f where "f∈A" and "nofail (f x)"
by (simp only: refine_pw_simps) blast
with E have "<P> c <λr. hn_rel Ry (f x) r * F>" by blast
hence W: "<P> c <λ_. true>" by (rule cons_post_rule, simp)
from E have
E': "∀f. f∈A ∧ nofail (f x) ⟶ <P> c <λr. hn_rel Ry (f x) r * F>"
by blast
from wit_swap_forall[OF W E'] have
E'': "<P> c
<λr. ¬⇩A (∃⇩Axa. ↑ (xa ∈ A ∧ nofail (xa x)) *
¬⇩A (hn_rel Ry (xa x) r * F))>" .
thus ?thesis
apply (rule cons_post_rule)
unfolding entails_def hn_rel_def
apply clarsimp
proof -
fix h as p
assume A: "∀f. f∈A ⟶ (∃a.
((h, as) ⊨ Ry a p * F ∧ RETURN a ≤ f x)) ∨ ¬ nofail (f x)"
with ‹f∈A› and ‹nofail (f x)› obtain a where
1: "(h, as) ⊨ Ry a p * F" and "RETURN a ≤ f x"
by blast
have
"∀f∈A. nofail (f x) ⟶ (h, as) ⊨ Ry a p * F ∧ RETURN a ≤ f x"
proof clarsimp
fix f'
assume "f'∈A" and "nofail (f' x)"
with A obtain a' where
2: "(h, as) ⊨ Ry a' p * F" and "RETURN a' ≤ f' x"
by blast
moreover note preciseD'[OF PREC 1 2]
ultimately show "(h, as) ⊨ Ry a p * F ∧ RETURN a ≤ f' x" by simp
qed
hence "RETURN a ≤ (INF f∈A. f x)"
by (metis (mono_tags) le_INF_iff le_nofailI)
with 1 show "∃a. (h, as) ⊨ Ry a p * F ∧ RETURN a ≤ (INF f∈A. f x)"
by blast
qed
qed
lemma hn_admissible':
assumes PREC: "precise Ry"
assumes E: "∀f∈A. nofail (f x) ⟶ <P> c <λr. hn_rel Ry (f x) r * F>⇩t"
assumes NF: "nofail (INF f∈A. f x)"
shows "<P> c <λr. hn_rel Ry (INF f∈A. f x) r * F>⇩t"
apply (rule hn_admissible[OF PREC, where F="F*true", simplified])
apply simp
by fact+
lemma hnr_RECT_old:
assumes S: "⋀cf af ax px. ⟦
⋀ax px. hn_refine (hn_ctxt Rx ax px * F) (cf px) (F' ax px) Ry (af ax)⟧
⟹ hn_refine (hn_ctxt Rx ax px * F) (cB cf px) (F' ax px) Ry (aB af ax)"
assumes M: "(⋀x. mono_Heap (λf. cB f x))"
assumes PREC: "precise Ry"
shows "hn_refine
(hn_ctxt Rx ax px * F) (heap.fixp_fun cB px) (F' ax px) Ry (RECT aB ax)"
unfolding RECT_gfp_def
proof (simp, intro conjI impI)
assume "trimono aB"
hence "mono aB" by (simp add: trimonoD)
have "∀ax px.
hn_refine (hn_ctxt Rx ax px * F) (heap.fixp_fun cB px) (F' ax px) Ry
(gfp aB ax)"
apply (rule gfp_cadm_induct[OF _ _ ‹mono aB›])
apply rule
apply (auto simp: hn_refine_alt intro: hn_admissible'[OF PREC]) []
apply (auto simp: hn_refine_alt) []
apply clarsimp
apply (subst heap.mono_body_fixp[of cB, OF M])
apply (rule S)
apply blast
done
thus "hn_refine (hn_ctxt Rx ax px * F)
(ccpo.fixp (fun_lub Heap_lub) (fun_ord Heap_ord) cB px) (F' ax px) Ry
(gfp aB ax)" by simp
qed
lemma hnr_RECT:
assumes S: "⋀cf af ax px. ⟦
⋀ax px. hn_refine (hn_ctxt Rx ax px * F) (cf px) (F' ax px) Ry (af ax)⟧
⟹ hn_refine (hn_ctxt Rx ax px * F) (cB cf px) (F' ax px) Ry (aB af ax)"
assumes M: "(⋀x. mono_Heap (λf. cB f x))"
shows "hn_refine
(hn_ctxt Rx ax px * F) (heap.fixp_fun cB px) (F' ax px) Ry (RECT aB ax)"
unfolding RECT_def
proof (simp, intro conjI impI)
assume "trimono aB"
hence "flatf_mono_ge aB" by (simp add: trimonoD)
have "∀ax px.
hn_refine (hn_ctxt Rx ax px * F) (heap.fixp_fun cB px) (F' ax px) Ry
(flatf_gfp aB ax)"
apply (rule flatf_ord.fixp_induct[OF _ ‹flatf_mono_ge aB›])
apply (rule flatf_admissible_pointwise)
apply simp
apply (auto simp: hn_refine_alt) []
apply clarsimp
apply (subst heap.mono_body_fixp[of cB, OF M])
apply (rule S)
apply blast
done
thus "hn_refine (hn_ctxt Rx ax px * F)
(ccpo.fixp (fun_lub Heap_lub) (fun_ord Heap_ord) cB px) (F' ax px) Ry
(flatf_gfp aB ax)" by simp
qed
lemma hnr_If:
assumes P: "Γ ⟹⇩t Γ1 * hn_val bool_rel a a'"
assumes RT: "a ⟹ hn_refine (Γ1 * hn_val bool_rel a a') b' Γ2b R b"
assumes RE: "¬a ⟹ hn_refine (Γ1 * hn_val bool_rel a a') c' Γ2c R c"
assumes IMP: "Γ2b ∨⇩A Γ2c ⟹⇩t Γ'"
shows "hn_refine Γ (if a' then b' else c') Γ' R (if a then b else c)"
apply (rule hn_refine_cons[OF P])
apply1 (rule hn_refine_preI)
applyF (cases a; simp add: hn_ctxt_def pure_def)
focus
apply1 (rule hn_refine_split_post)
applyF (rule hn_refine_cons_pre[OF _ RT])
applyS (simp add: hn_ctxt_def pure_def)
applyS simp
solved
solved
apply1 (rule hn_refine_post_other)
applyF (rule hn_refine_cons_pre[OF _ RE])
applyS (simp add: hn_ctxt_def pure_def)
applyS simp
solved
solved
applyS (rule IMP)
applyS (rule entt_refl)
done
subsection ‹ML-Level Utilities›
ML ‹
signature SEPREF_BASIC = sig
val dest_lambda_rc: Proof.context -> term -> ((term * (term -> term)) * Proof.context)
val apply_under_lambda: (Proof.context -> term -> term) -> Proof.context -> term -> term
val is_nresT: typ -> bool
val mk_nresT: typ -> typ
val dest_nresT: typ -> typ
val mk_cequals: cterm * cterm -> cterm
val mk_entails: term * term -> term
val constrain_type_pre: typ -> term -> term
val mk_pair_in_pre: term -> term -> term -> term
val mk_compN_pre: int -> term -> term -> term
val mk_curry0_pre: term -> term
val mk_curry_pre: term -> term
val mk_curryN_pre: int -> term -> term
val mk_uncurry0_pre: term -> term
val mk_uncurry_pre: term -> term
val mk_uncurryN_pre: int -> term -> term
val hn_refine_conv: conv -> conv -> conv -> conv -> conv -> conv
val hn_refine_conv_a: conv -> conv
val hn_refine_concl_conv_a: (Proof.context -> conv) -> Proof.context -> conv
val dest_hn_refine: term -> term * term * term * term * term
val mk_hn_refine: term * term * term * term * term -> term
val is_hn_refine_concl: term -> bool
val dest_hnr_absfun: term -> bool * (term * term list)
val mk_hnr_absfun: bool * (term * term list) -> term
val mk_hnr_absfun': (term * term list) -> term
val star_permute_tac: Proof.context -> tactic
val mk_star: term * term -> term
val list_star: term list -> term
val strip_star: term -> term list
val is_true: term -> bool
val is_hn_ctxt: term -> bool
val dest_hn_ctxt: term -> term * term * term
val dest_hn_ctxt_opt: term -> (term * term * term) option
type phases_ctrl = {
trace: bool,
int_res: bool,
start: string option,
stop: string option
}
val dflt_phases_ctrl: phases_ctrl
val dbg_phases_ctrl: phases_ctrl
val flag_phases_ctrl: bool -> phases_ctrl
type phase = string * (Proof.context -> tactic') * int
val PHASES': phase list -> phases_ctrl -> Proof.context -> tactic'
end
structure Sepref_Basic: SEPREF_BASIC = struct
fun is_nresT (Type (@{type_name nres},[_])) = true | is_nresT _ = false
fun mk_nresT T = Type(@{type_name nres},[T])
fun dest_nresT (Type (@{type_name nres},[T])) = T | dest_nresT T = raise TYPE("dest_nresT",[T],[])
fun dest_lambda_rc ctxt (Abs (x,T,t)) = let
val (u,ctxt) = yield_singleton Variable.variant_fixes x ctxt
val u = Free (u,T)
val t = subst_bound (u,t)
val reconstruct = Term.lambda_name (x,u)
in
((t,reconstruct),ctxt)
end
| dest_lambda_rc _ t = raise TERM("dest_lambda_rc",[t])
fun apply_under_lambda f ctxt t = let
val ((t,rc),ctxt) = dest_lambda_rc ctxt t
val t = f ctxt t
in
rc t
end
fun mk_pair_in_pre x y r = Const (@{const_name Set.member}, dummyT) $
(Const (@{const_name Product_Type.Pair}, dummyT) $ x $ y) $ r
fun mk_uncurry_pre t = Const(@{const_name uncurry}, dummyT)$t
fun mk_uncurry0_pre t = Const(@{const_name uncurry0}, dummyT)$t
fun mk_uncurryN_pre 0 = mk_uncurry0_pre
| mk_uncurryN_pre 1 = I
| mk_uncurryN_pre n = mk_uncurry_pre o mk_uncurryN_pre (n-1)
fun mk_curry_pre t = Const(@{const_name curry}, dummyT)$t
fun mk_curry0_pre t = Const(@{const_name curry0}, dummyT)$t
fun mk_curryN_pre 0 = mk_curry0_pre
| mk_curryN_pre 1 = I
| mk_curryN_pre n = mk_curry_pre o mk_curryN_pre (n-1)
fun mk_compN_pre 0 f g = f $ g
| mk_compN_pre n f g = let
val g = fold (fn i => fn t => t$Bound i) (n-2 downto 0) g
val t = Const(@{const_name "Fun.comp"},dummyT) $ f $ g
val t = fold (fn i => fn t => Abs ("x"^string_of_int i,dummyT,t)) (n-1 downto 1) t
in
t
end
fun constrain_type_pre T t = Const(@{syntax_const "_type_constraint_"},T-->T) $ t
local open Conv in
fun hn_refine_conv c1 c2 c3 c4 c5 ct = case Thm.term_of ct of
@{mpat "hn_refine _ _ _ _ _"} => let
val cc = combination_conv
in
cc (cc (cc (cc (cc all_conv c1) c2) c3) c4) c5 ct
end
| _ => raise CTERM ("hn_refine_conv",[ct])
val hn_refine_conv_a = hn_refine_conv all_conv all_conv all_conv all_conv
fun hn_refine_concl_conv_a conv ctxt = Refine_Util.HOL_concl_conv
(fn ctxt => hn_refine_conv_a (conv ctxt)) ctxt
end
val mk_cequals = uncurry SMT_Util.mk_cequals
val mk_entails = HOLogic.mk_binrel @{const_name "entails"}
val mk_star = HOLogic.mk_binop @{const_name "Groups.times_class.times"}
fun list_star [] = @{term "emp::assn"}
| list_star [a] = a
| list_star (a::l) = mk_star (list_star l,a)
fun strip_star @{mpat "?a*?b"} = strip_star a @ strip_star b
| strip_star @{mpat "emp"} = []
| strip_star t = [t]
fun is_true @{mpat "true"} = true | is_true _ = false
fun is_hn_ctxt @{mpat "hn_ctxt _ _ _"} = true | is_hn_ctxt _ = false
fun dest_hn_ctxt @{mpat "hn_ctxt ?R ?a ?p"} = (R,a,p)
| dest_hn_ctxt t = raise TERM("dest_hn_ctxt",[t])
fun dest_hn_ctxt_opt @{mpat "hn_ctxt ?R ?a ?p"} = SOME (R,a,p)
| dest_hn_ctxt_opt _ = NONE
fun strip_abs_args (t as @{mpat "PR_CONST _"}) = (t,[])
| strip_abs_args @{mpat "?f$?a"} = (case strip_abs_args f of (f,args) => (f,args@[a]))
| strip_abs_args t = (t,[])
fun dest_hnr_absfun @{mpat "RETURN$?a"} = (true, strip_abs_args a)
| dest_hnr_absfun f = (false, strip_abs_args f)
fun mk_hnr_absfun (true,fa) = Autoref_Tagging.list_APP fa |> (fn a => @{mk_term "RETURN$?a"})
| mk_hnr_absfun (false,fa) = Autoref_Tagging.list_APP fa
fun mk_hnr_absfun' fa = let
val t = Autoref_Tagging.list_APP fa
val T = fastype_of t
in
case T of
Type (@{type_name nres},_) => t
| _ => @{mk_term "RETURN$?t"}
end
fun dest_hn_refine @{mpat "hn_refine ?P ?c ?Q ?R ?a"} = (P,c,Q,R,a)
| dest_hn_refine t = raise TERM("dest_hn_refine",[t])
fun mk_hn_refine (P,c,Q,R,a) = @{mk_term "hn_refine ?P ?c ?Q ?R ?a"}
val is_hn_refine_concl = can (HOLogic.dest_Trueprop #> dest_hn_refine)
fun star_permute_tac ctxt = ALLGOALS (simp_tac (put_simpset HOL_basic_ss ctxt addsimps @{thms star_aci}))
type phases_ctrl = {
trace: bool,
int_res: bool,
start: string option,
stop: string option
}
val dflt_phases_ctrl = {trace=false,int_res=false,start=NONE,stop=NONE}
val dbg_phases_ctrl = {trace=true,int_res=true,start=NONE,stop=NONE}
fun flag_phases_ctrl dbg = if dbg then dbg_phases_ctrl else dflt_phases_ctrl
type phase = string * (Proof.context -> tactic') * int
local
fun ph_range phases start stop = let
fun find_phase name = let
val i = find_index (fn (n,_,_) => n=name) phases
val _ = if i<0 then error ("No such phase: " ^ name) else ()
in
i
end
val i = case start of NONE => 0 | SOME n => find_phase n
val j = case stop of NONE => length phases - 1 | SOME n => find_phase n
val phases = take (j+1) phases |> drop i
val _ = case phases of [] => error "No phases selected, range is empty" | _ => ()
in
phases
end
in
fun PHASES' phases ctrl ctxt = let
val phases = ph_range phases (#start ctrl) (#stop ctrl)
val phases = map (fn (n,tac,d) => (n,tac ctxt,d)) phases
fun r [] _ st = Seq.single st
| r ((name,tac,d)::tacs) i st = let
val n = Thm.nprems_of st
val bailout_tac = if #int_res ctrl then all_tac else no_tac
fun trace_tac msg st = (if #trace ctrl then tracing msg else (); Seq.single st)
val trace_start_tac = trace_tac ("Phase " ^ name)
in
K trace_start_tac THEN' IF_EXGOAL (tac)
THEN_ELSE' (
fn i => fn st =>
if Thm.nprems_of st = n+d then
((trace_tac " Done" THEN r tacs i) st)
else
(trace_tac "*** Wrong number of produced goals" THEN bailout_tac) st
,
K (trace_tac "*** Phase tactic failed" THEN bailout_tac))
end i st
in
r phases
end
end
end
signature SEPREF_DEBUGGING = sig
val cfg_debug_all: bool Config.T
val is_debug: bool Config.T -> Proof.context -> bool
val is_debug': Proof.context -> bool
val DBG_CONVERSION: bool Config.T -> Proof.context -> conv -> tactic'
val DBG_CONVERSION': Proof.context -> conv -> tactic'
val tracing_tac': string -> Proof.context -> tactic'
val warning_tac': string -> Proof.context -> tactic'
val error_tac': string -> Proof.context -> tactic'
val dbg_trace_msg: bool Config.T -> Proof.context -> string -> unit
val dbg_trace_msg': Proof.context -> string -> unit
val dbg_msg_tac: bool Config.T -> (Proof.context -> int -> thm -> string) -> Proof.context -> tactic'
val dbg_msg_tac': (Proof.context -> int -> thm -> string) -> Proof.context -> tactic'
val msg_text: string -> Proof.context -> int -> thm -> string
val msg_subgoal: string -> Proof.context -> int -> thm -> string
val msg_from_subgoal: string -> (term -> Proof.context -> string) -> Proof.context -> int -> thm -> string
val msg_allgoals: string -> Proof.context -> int -> thm -> string
end
structure Sepref_Debugging: SEPREF_DEBUGGING = struct
val cfg_debug_all =
Attrib.setup_config_bool @{binding sepref_debug_all} (K false)
fun is_debug cfg ctxt = Config.get ctxt cfg orelse Config.get ctxt cfg_debug_all
fun is_debug' ctxt = Config.get ctxt cfg_debug_all
fun dbg_trace cfg ctxt obj =
if is_debug cfg ctxt then
tracing (@{make_string} obj)
else ()
fun dbg_trace' ctxt obj =
if is_debug' ctxt then
tracing (@{make_string} obj)
else ()
fun dbg_trace_msg cfg ctxt msg =
if is_debug cfg ctxt then
tracing msg
else ()
fun dbg_trace_msg' ctxt msg =
if is_debug' ctxt then
tracing msg
else ()
fun DBG_CONVERSION cfg ctxt cv i st =
Seq.single (Conv.gconv_rule cv i st)
handle e as THM _ => (dbg_trace cfg ctxt e; Seq.empty)
| e as CTERM _ => (dbg_trace cfg ctxt e; Seq.empty)
| e as TERM _ => (dbg_trace cfg ctxt e; Seq.empty)
| e as TYPE _ => (dbg_trace cfg ctxt e; Seq.empty);
fun DBG_CONVERSION' ctxt cv i st =
Seq.single (Conv.gconv_rule cv i st)
handle e as THM _ => (dbg_trace' ctxt e; Seq.empty)
| e as CTERM _ => (dbg_trace' ctxt e; Seq.empty)
| e as TERM _ => (dbg_trace' ctxt e; Seq.empty)
| e as TYPE _ => (dbg_trace' ctxt e; Seq.empty);
local
fun gen_subgoal_msg_tac do_msg msg ctxt = IF_EXGOAL (fn i => fn st => let
val t = nth (Thm.prems_of st) (i-1)
val _ = Pretty.block [Pretty.str msg, Pretty.fbrk, Syntax.pretty_term ctxt t]
|> Pretty.string_of |> do_msg
in
Seq.single st
end)
in
val tracing_tac' = gen_subgoal_msg_tac tracing
val warning_tac' = gen_subgoal_msg_tac warning
val error_tac' = gen_subgoal_msg_tac error
end
fun dbg_msg_tac cfg msg ctxt =
if is_debug cfg ctxt then (fn i => fn st => (tracing (msg ctxt i st); Seq.single st))
else K all_tac
fun dbg_msg_tac' msg ctxt =
if is_debug' ctxt then (fn i => fn st => (tracing (msg ctxt i st); Seq.single st))
else K all_tac
fun msg_text msg _ _ _ = msg
fun msg_from_subgoal msg sgmsg ctxt i st =
case try (nth (Thm.prems_of st)) (i-1) of
NONE => msg ^ "\n" ^ "Subgoal out of range"
| SOME t => msg ^ "\n" ^ sgmsg t ctxt
fun msg_subgoal msg = msg_from_subgoal msg (fn t => fn ctxt =>
Syntax.pretty_term ctxt t |> Pretty.string_of
)
fun msg_allgoals msg ctxt _ st =
msg ^ "\n" ^ Pretty.string_of (Pretty.chunks (Goal_Display.pretty_goals ctxt st))
end
›
ML ‹
infix 1 THEN_NEXT THEN_ALL_NEW_LIST THEN_ALL_NEW_LIST'
signature STACTICAL = sig
val THEN_NEXT: tactic' * tactic' -> tactic'
val APPLY_LIST: tactic' list -> tactic'
val THEN_ALL_NEW_LIST: tactic' * tactic' list -> tactic'
val THEN_ALL_NEW_LIST': tactic' * (tactic' list * tactic') -> tactic'
end
structure STactical : STACTICAL = struct
infix 1 THEN_WITH_GOALDIFF
fun (tac1 THEN_WITH_GOALDIFF tac2) st = let
val n1 = Thm.nprems_of st
in
st |> (tac1 THEN (fn st => tac2 (Thm.nprems_of st - n1) st ))
end
fun (tac1 THEN_NEXT tac2) i =
tac1 i THEN_WITH_GOALDIFF (fn d => (
if d < ~1 then
(error "THEN_NEXT: Tactic solved more than one goal"; no_tac)
else
tac2 (i+1+d)
))
fun APPLY_LIST [] = K all_tac
| APPLY_LIST (tac::tacs) = tac THEN_NEXT APPLY_LIST tacs
fun (tac1 THEN_ALL_NEW_LIST tacs) i =
tac1 i
THEN_WITH_GOALDIFF (fn d =>
if d+1 <> length tacs then (
error "THEN_ALL_NEW_LIST: Tactic produced wrong number of goals"; no_tac
) else APPLY_LIST tacs i
)
fun (tac1 THEN_ALL_NEW_LIST' (tacs,rtac)) i =
tac1 i
THEN_WITH_GOALDIFF (fn d => let
val _ = if d+1 < length tacs then error "THEN_ALL_NEW_LIST': Tactic produced too few goals" else ();
val tacs' = tacs @ replicate (d + 1 - length tacs) rtac
in
APPLY_LIST tacs' i
end)
end
open STactical
›
end
Theory Sepref_Monadify
section ‹Monadify›
theory Sepref_Monadify
imports Sepref_Basic Sepref_Id_Op
begin
text ‹
In this phase, a monadic program is converted to complete monadic form,
that is, computation of compound expressions are made visible as top-level
operations in the monad.
The monadify process is separated into 2 steps.
\begin{enumerate}
\item In a first step, eta-expansion is used to add missing operands
to operations and combinators. This way, operators and combinators
always occur with the same arity, which simplifies further processing.
\item In a second step, computation of compound operands is flattened,
introducing new bindings for the intermediate values.
\end{enumerate}
›
definition SP
where [simp]: "SP x ≡ x"
lemma SP_cong[cong]: "SP x ≡ SP x" by simp
lemma PR_CONST_cong[cong]: "PR_CONST x ≡ PR_CONST x" by simp
definition RCALL
where [simp]: "RCALL D ≡ D"
definition EVAL
where [simp]: "EVAL x ≡ RETURN x"
text ‹
Internally, the package first applies rewriting rules from
‹sepref_monadify_arity›, which use eta-expansion to ensure that
every combinator has enough actual parameters. Moreover, this phase will
mark recursive calls by the tag @{const RCALL}.
Next, rewriting rules from ‹sepref_monadify_comb› are used to
add @{const EVAL}-tags to plain expressions that should be evaluated
in the monad. The @{const EVAL} tags are flattened using a default simproc
that generates left-to-right argument order.
›
lemma monadify_simps:
"Refine_Basic.bind$(RETURN$x)$(λ⇩2x. f x) = f x"
"EVAL$x ≡ RETURN$x"
by simp_all
definition [simp]: "PASS ≡ RETURN"
lemma remove_pass_simps:
"Refine_Basic.bind$(PASS$x)$(λ⇩2x. f x) ≡ f x"
"Refine_Basic.bind$m$(λ⇩2x. PASS$x) ≡ m"
by simp_all
definition COPY :: "'a ⇒ 'a"
where [simp]: "COPY x ≡ x"
lemma RET_COPY_PASS_eq: "RETURN$(COPY$p) = PASS$p" by simp
named_theorems_rev sepref_monadify_arity "Sepref.Monadify: Arity alignment equations"
named_theorems_rev sepref_monadify_comb "Sepref.Monadify: Combinator equations"
ML ‹
structure Sepref_Monadify = struct
local
fun cr_var (i,T) = ("v"^string_of_int i, Free ("__v"^string_of_int i,T))
fun lambda2_name n t = let
val t = @{mk_term "PROTECT2 ?t DUMMY"}
in
Term.lambda_name n t
end
fun
bind_args exp0 [] = exp0
| bind_args exp0 ((x,m)::xms) = let
val lr = bind_args exp0 xms
|> incr_boundvars 1
|> lambda2_name x
in @{mk_term "Refine_Basic.bind$?m$?lr"} end
fun monadify t = let
val (f,args) = Autoref_Tagging.strip_app t
val _ = not (is_Abs f) orelse
raise TERM ("monadify: higher-order",[t])
val argTs = map fastype_of args
val args = map (fn a => @{mk_term "EVAL$?a"}) args
val argVs = tag_list 0 argTs
|> map cr_var
val res0 = let
val x = Autoref_Tagging.list_APP (f,map #2 argVs)
in
@{mk_term "SP (RETURN$?x)"}
end
val res = bind_args res0 (argVs ~~ args)
in
res
end
fun monadify_conv_aux ctxt ct = case Thm.term_of ct of
@{mpat "EVAL$_"} => let
val ss = put_simpset HOL_basic_ss ctxt
val ss = (ss addsimps @{thms monadify_simps SP_def})
val tac = (simp_tac ss 1)
in (
Refine_Util.f_tac_conv ctxt (dest_comb #> #2 #> monadify) tac) ct
end
| t => raise TERM ("monadify_conv",[t])
in
val monadify_simproc =
Simplifier.make_simproc @{context} "monadify_simproc"
{lhss =
[Logic.varify_global @{term "EVAL$a"}],
proc = K (try o monadify_conv_aux)};
end
local
open Sepref_Basic
fun mark_params t = let
val (P,c,Q,R,a) = dest_hn_refine t
val pps = strip_star P |> map_filter (dest_hn_ctxt_opt #> map_option #2)
fun tr env (t as @{mpat "RETURN$?x"}) =
if is_Bound x orelse member (aconv) pps x then
@{mk_term env: "PASS$?x"}
else t
| tr env (t1$t2) = tr env t1 $ tr env t2
| tr env (Abs (x,T,t)) = Abs (x,T,tr (T::env) t)
| tr _ t = t
val a = tr [] a
in
mk_hn_refine (P,c,Q,R,a)
end
in
fun mark_params_conv ctxt = Refine_Util.f_tac_conv ctxt
(mark_params)
(simp_tac (put_simpset HOL_basic_ss ctxt addsimps @{thms PASS_def}) 1)
end
local
open Sepref_Basic
fun dp ctxt (@{mpat "Refine_Basic.bind$(PASS$?p)$(?t' AS⇩p (λ_. PROTECT2 _ DUMMY))"}) =
let
val (t',ps) = let
val ((t',rc),ctxt) = dest_lambda_rc ctxt t'
val f = case t' of @{mpat "PROTECT2 ?f _"} => f | _ => raise Match
val (f,ps) = dp ctxt f
val t' = @{mk_term "PROTECT2 ?f DUMMY"}
val t' = rc t'
in
(t',ps)
end
val dup = member (aconv) ps p
val t = if dup then
@{mk_term "Refine_Basic.bind$(RETURN$(COPY$?p))$?t'"}
else
@{mk_term "Refine_Basic.bind$(PASS$?p)$?t'"}
in
(t,p::ps)
end
| dp ctxt (t1$t2) = (#1 (dp ctxt t1) $ #1 (dp ctxt t2),[])
| dp ctxt (t as (Abs _)) = (apply_under_lambda (#1 oo dp) ctxt t,[])
| dp _ t = (t,[])
fun dp_conv ctxt = Refine_Util.f_tac_conv ctxt
(#1 o dp ctxt)
(ALLGOALS (simp_tac (put_simpset HOL_basic_ss ctxt addsimps @{thms RET_COPY_PASS_eq})))
in
fun dup_tac ctxt = CONVERSION (Sepref_Basic.hn_refine_concl_conv_a dp_conv ctxt)
end
fun arity_tac ctxt = let
val arity1_ss = put_simpset HOL_basic_ss ctxt
addsimps ((Named_Theorems_Rev.get ctxt @{named_theorems_rev sepref_monadify_arity}))
|> Simplifier.add_cong @{thm SP_cong}
|> Simplifier.add_cong @{thm PR_CONST_cong}
val arity2_ss = put_simpset HOL_basic_ss ctxt
addsimps @{thms beta SP_def}
in
simp_tac arity1_ss THEN' simp_tac arity2_ss
end
fun comb_tac ctxt = let
val comb1_ss = put_simpset HOL_basic_ss ctxt
addsimps (Named_Theorems_Rev.get ctxt @{named_theorems_rev sepref_monadify_comb})
addsimprocs [monadify_simproc]
|> Simplifier.add_cong @{thm SP_cong}
|> Simplifier.add_cong @{thm PR_CONST_cong}
val comb2_ss = put_simpset HOL_basic_ss ctxt
addsimps @{thms SP_def}
in
simp_tac comb1_ss THEN' simp_tac comb2_ss
end
fun mark_params_tac ctxt = CONVERSION (
Refine_Util.HOL_concl_conv (K (mark_params_conv ctxt)) ctxt)
fun contains_eval @{mpat "Trueprop (hn_refine _ _ _ _ ?a)"} =
Term.exists_subterm (fn @{mpat EVAL} => true | _ => false) a
| contains_eval t = raise TERM("contains_eval",[t]);
fun remove_pass_tac ctxt =
simp_tac (put_simpset HOL_basic_ss ctxt addsimps @{thms remove_pass_simps})
fun monadify_tac dbg ctxt = let
open Sepref_Basic
in
PHASES' [
("arity", arity_tac, 0),
("comb", comb_tac, 0),
("check_EVAL", K (CONCL_COND' (not o contains_eval)), 0),
("mark_params", mark_params_tac, 0),
("dup", dup_tac, 0),
("remove_pass", remove_pass_tac, 0)
] (flag_phases_ctrl dbg) ctxt
end
end
›
lemma dflt_arity[sepref_monadify_arity]:
"RETURN ≡ λ⇩2x. SP RETURN$x"
"RECT ≡ λ⇩2B x. SP RECT$(λ⇩2D x. B$(λ⇩2x. RCALL$D$x)$x)$x"
"case_list ≡ λ⇩2fn fc l. SP case_list$fn$(λ⇩2x xs. fc$x$xs)$l"
"case_prod ≡ λ⇩2fp p. SP case_prod$(λ⇩2a b. fp$a$b)$p"
"case_option ≡ λ⇩2fn fs ov. SP case_option$fn$(λ⇩2x. fs$x)$ov"
"If ≡ λ⇩2b t e. SP If$b$t$e"
"Let ≡ λ⇩2x f. SP Let$x$(λ⇩2x. f$x)"
by (simp_all only: SP_def APP_def PROTECT2_def RCALL_def)
lemma dflt_comb[sepref_monadify_comb]:
"⋀B x. RECT$B$x ≡ Refine_Basic.bind$(EVAL$x)$(λ⇩2x. SP (RECT$B$x))"
"⋀D x. RCALL$D$x ≡ Refine_Basic.bind$(EVAL$x)$(λ⇩2x. SP (RCALL$D$x))"
"⋀fn fc l. case_list$fn$fc$l ≡ Refine_Basic.bind$(EVAL$l)$(λ⇩2l. (SP case_list$fn$fc$l))"
"⋀fp p. case_prod$fp$p ≡ Refine_Basic.bind$(EVAL$p)$(λ⇩2p. (SP case_prod$fp$p))"
"⋀fn fs ov. case_option$fn$fs$ov
≡ Refine_Basic.bind$(EVAL$ov)$(λ⇩2ov. (SP case_option$fn$fs$ov))"
"⋀b t e. If$b$t$e ≡ Refine_Basic.bind$(EVAL$b)$(λ⇩2b. (SP If$b$t$e))"
"⋀x. RETURN$x ≡ Refine_Basic.bind$(EVAL$x)$(λ⇩2x. SP (RETURN$x))"
"⋀x f. Let$x$f ≡ Refine_Basic.bind$(EVAL$x)$(λ⇩2x. (SP Let$x$f))"
by (simp_all)
lemma dflt_plain_comb[sepref_monadify_comb]:
"EVAL$(If$b$t$e) ≡ Refine_Basic.bind$(EVAL$b)$(λ⇩2b. If$b$(EVAL$t)$(EVAL$e))"
"EVAL$(case_list$fn$(λ⇩2x xs. fc x xs)$l) ≡
Refine_Basic.bind$(EVAL$l)$(λ⇩2l. case_list$(EVAL$fn)$(λ⇩2x xs. EVAL$(fc x xs))$l)"
"EVAL$(case_prod$(λ⇩2a b. fp a b)$p) ≡
Refine_Basic.bind$(EVAL$p)$(λ⇩2p. case_prod$(λ⇩2a b. EVAL$(fp a b))$p)"
"EVAL$(case_option$fn$(λ⇩2x. fs x)$ov) ≡
Refine_Basic.bind$(EVAL$ov)$(λ⇩2ov. case_option$(EVAL$fn)$(λ⇩2x. EVAL$(fs x))$ov)"
"EVAL $ (Let $ v $ (λ⇩2x. f x)) ≡ (⤜) $ (EVAL $ v) $ (λ⇩2x. EVAL $ (f x))"
apply (rule eq_reflection, simp split: list.split prod.split option.split)+
done
lemma evalcomb_PR_CONST[sepref_monadify_comb]:
"EVAL$(PR_CONST x) ≡ SP (RETURN$(PR_CONST x))"
by simp
end
Theory Sepref_Constraints
theory Sepref_Constraints
imports Main Automatic_Refinement.Refine_Lib Sepref_Basic
begin
definition "CONSTRAINT_SLOT (x::prop) ≡ x"
lemma insert_slot_rl1:
assumes "PROP P ⟹ PROP (CONSTRAINT_SLOT (Trueprop True)) ⟹ PROP Q"
shows "PROP (CONSTRAINT_SLOT (PROP P)) ⟹ PROP Q"
using assms unfolding CONSTRAINT_SLOT_def by simp
lemma insert_slot_rl2:
assumes "PROP P ⟹ PROP (CONSTRAINT_SLOT S) ⟹ PROP Q"
shows "PROP (CONSTRAINT_SLOT (PROP S &&& PROP P)) ⟹ PROP Q"
using assms unfolding CONSTRAINT_SLOT_def conjunction_def .
lemma remove_slot: "PROP (CONSTRAINT_SLOT (Trueprop True))"
unfolding CONSTRAINT_SLOT_def by (rule TrueI)
definition CONSTRAINT where [simp]: "CONSTRAINT P x ≡ P x"
lemma CONSTRAINT_D:
assumes "CONSTRAINT (P::'a => bool) x"
shows "P x"
using assms unfolding CONSTRAINT_def by simp
lemma CONSTRAINT_I:
assumes "P x"
shows "CONSTRAINT (P::'a => bool) x"
using assms unfolding CONSTRAINT_def by simp
text ‹Special predicate to indicate unsolvable constraint.
The constraint solver refuses to put those into slot.
Thus, adding safe rules introducing this can be used to indicate
unsolvable constraints early.
›
definition CN_FALSE :: "('a⇒bool) ⇒ 'a ⇒ bool" where [simp]: "CN_FALSE P x ≡ False"
lemma CN_FALSEI: "CN_FALSE P x ⟹ P x" by simp
named_theorems constraint_simps ‹Simplification of constraints›
named_theorems constraint_abbrevs ‹Constraint Solver: Abbreviations›
lemmas split_constraint_rls
= atomize_conj[symmetric] imp_conjunction all_conjunction conjunction_imp
ML ‹
signature SEPREF_CONSTRAINTS = sig
val WITH_SLOT: tactic' -> tactic
val ON_SLOT: tactic -> tactic
val create_slot_tac: tactic
val ensure_slot_tac: tactic
val remove_slot_tac: tactic
val prefer_slot_tac: tactic
val dest_slot_tac: tactic'
val has_slot: thm -> bool
val to_slot_tac: tactic'
val print_slot_tac: Proof.context -> tactic
val focus: tactic
val unfocus: tactic
val unfocus_ins:tactic
val cond_focus: (term -> bool) -> tactic
val some_to_slot_tac: (term -> bool) -> tactic
val is_constraint_goal: term -> bool
val is_constraint_tac: tactic'
val slot_constraint_tac: int -> tactic
val add_constraint_rule: thm -> Context.generic -> Context.generic
val del_constraint_rule: thm -> Context.generic -> Context.generic
val get_constraint_rules: Proof.context -> thm list
val add_safe_constraint_rule: thm -> Context.generic -> Context.generic
val del_safe_constraint_rule: thm -> Context.generic -> Context.generic
val get_safe_constraint_rules: Proof.context -> thm list
val solve_constraint_tac: Proof.context -> tactic'
val safe_constraint_tac: Proof.context -> tactic'
val solve_constraint'_tac: Proof.context -> tactic'
val safe_constraint'_tac: Proof.context -> tactic'
val constraint_tac: Proof.context -> tactic'
val process_constraint_slot: Proof.context -> tactic
val solve_constraint_slot: Proof.context -> tactic
val setup: theory -> theory
end
structure Sepref_Constraints: SEPREF_CONSTRAINTS = struct
fun is_slot_goal @{mpat "CONSTRAINT_SLOT _"} = true | is_slot_goal _ = false
fun slot_goal_num st = let
val i = find_index is_slot_goal (Thm.prems_of st) + 1
in
i
end
fun has_slot st = slot_goal_num st > 0
fun WITH_SLOT tac st = let
val si = slot_goal_num st
in
if si>0 then tac si st else (warning "Constraints: No slot"; Seq.empty)
end
val to_slot_tac = IF_EXGOAL (fn i => WITH_SLOT (fn si =>
if i<si then
prefer_tac si THEN prefer_tac (i+1)
THEN (
PRIMITIVE (fn st => Drule.comp_no_flatten (st, 0) 1 @{thm insert_slot_rl1})
ORELSE PRIMITIVE (fn st => Drule.comp_no_flatten (st, 0) 1 @{thm insert_slot_rl2})
)
THEN defer_tac 1
else no_tac))
val create_slot_tac =
COND (has_slot) no_tac
(PRIMITIVE (Thm.implies_intr @{cterm "CONSTRAINT_SLOT (Trueprop True)"})
THEN defer_tac 1)
val ensure_slot_tac = TRY create_slot_tac
val prefer_slot_tac = WITH_SLOT prefer_tac
val dest_slot_tac = SELECT_GOAL (
ALLGOALS (
CONVERSION (Conv.rewr_conv @{thm CONSTRAINT_SLOT_def})
THEN' Goal.conjunction_tac
THEN' TRY o resolve0_tac @{thms TrueI})
THEN distinct_subgoals_tac
)
val remove_slot_tac = WITH_SLOT (resolve0_tac @{thms remove_slot})
val focus = WITH_SLOT (fn i =>
PRIMITIVE (Goal.restrict i 1)
THEN ALLGOALS dest_slot_tac
THEN create_slot_tac)
val unfocus_ins =
PRIMITIVE (Goal.unrestrict 1)
THEN WITH_SLOT defer_tac
fun some_to_slot_tac cond = (ALLGOALS (COND' (fn t => is_slot_goal t orelse not (cond t)) ORELSE' to_slot_tac))
val unfocus =
some_to_slot_tac (K true)
THEN unfocus_ins
fun cond_focus cond =
focus
THEN some_to_slot_tac (not o cond)
fun ON_SLOT tac = focus THEN tac THEN unfocus
fun print_slot_tac ctxt = ON_SLOT (print_tac ctxt "SLOT:")
local
fun unfold_abbrevs ctxt =
Local_Defs.unfold0 ctxt (
@{thms split_constraint_rls CONSTRAINT_def}
@ Named_Theorems.get ctxt @{named_theorems constraint_abbrevs}
@ Named_Theorems.get ctxt @{named_theorems constraint_simps})
#> Conjunction.elim_conjunctions
fun check_constraint_rl thm = let
fun ck (t as @{mpat "Trueprop (?C _)"}) =
if is_Var (Term.head_of C) then
raise TERM ("Schematic head in constraint rule",[t,Thm.prop_of thm])
else ()
| ck @{mpat "⋀_. PROP ?t"} = ck t
| ck @{mpat "PROP ?s ⟹ PROP ?t"} = (ck s; ck t)
| ck t = raise TERM ("Invalid part of constraint rule",[t,Thm.prop_of thm])
in
ck (Thm.prop_of thm); thm
end
fun check_unsafe_constraint_rl thm = let
val _ = Thm.nprems_of thm = 0
andalso raise TERM("Unconditional constraint rule must be safe (register this as safe rule)",[Thm.prop_of thm])
in
thm
end
in
structure constraint_rules = Named_Sorted_Thms (
val name = @{binding constraint_rules}
val description = "Constraint rules"
val sort = K I
fun transform context = let
open Conv
val ctxt = Context.proof_of context
in
unfold_abbrevs ctxt #> map (check_constraint_rl o check_unsafe_constraint_rl)
end
)
structure safe_constraint_rules = Named_Sorted_Thms (
val name = @{binding safe_constraint_rules}
val description = "Safe Constraint rules"
val sort = K I
fun transform context = let
open Conv
val ctxt = Context.proof_of context
in
unfold_abbrevs ctxt #> map check_constraint_rl
end
)
end
val add_constraint_rule = constraint_rules.add_thm
val del_constraint_rule = constraint_rules.del_thm
val get_constraint_rules = constraint_rules.get
val add_safe_constraint_rule = safe_constraint_rules.add_thm
val del_safe_constraint_rule = safe_constraint_rules.del_thm
val get_safe_constraint_rules = safe_constraint_rules.get
fun is_constraint_goal t = case Logic.strip_assums_concl t of
@{mpat "Trueprop (CONSTRAINT _ _)"} => true
| _ => false
val is_constraint_tac = COND' is_constraint_goal
fun is_slottable_constraint_goal t = case Logic.strip_assums_concl t of
@{mpat "Trueprop (CONSTRAINT (CN_FALSE _) _)"} => false
| @{mpat "Trueprop (CONSTRAINT _ _)"} => true
| _ => false
val slot_constraint_tac = COND' is_slottable_constraint_goal THEN' to_slot_tac
datatype 'a seq_cases = SC_NONE | SC_SINGLE of 'a Seq.seq | SC_MULTIPLE of 'a Seq.seq
fun seq_cases seq =
case Seq.pull seq of
NONE => SC_NONE
| SOME (st1,seq) => case Seq.pull seq of
NONE => SC_SINGLE (Seq.single st1)
| SOME (st2,seq) => SC_MULTIPLE (Seq.cons st1 (Seq.cons st2 seq))
fun SEQ_CASES tac (single_tac, multiple_tac) st = let
val res = tac st
in
case seq_cases res of
SC_NONE => Seq.empty
| SC_SINGLE res => Seq.maps single_tac res
| SC_MULTIPLE res => Seq.maps multiple_tac res
end
fun SAFE tac = SEQ_CASES tac (all_tac, no_tac)
fun SAFE' tac = SAFE o tac
local
fun simp_constraints_tac ctxt = let
val ctxt = put_simpset HOL_basic_ss ctxt
addsimps (Named_Theorems.get ctxt @{named_theorems constraint_simps})
in
simp_tac ctxt
end
fun unfold_abbrevs_tac ctxt = let
val ctxt = put_simpset HOL_basic_ss ctxt
addsimps (Named_Theorems.get ctxt @{named_theorems constraint_abbrevs})
val ethms = @{thms conjE}
val ithms = @{thms conjI}
in
full_simp_tac ctxt
THEN_ALL_NEW TRY o REPEAT_ALL_NEW (ematch_tac ctxt ethms)
THEN_ALL_NEW TRY o REPEAT_ALL_NEW (match_tac ctxt ithms)
end
fun WITH_RULE_NETS tac ctxt = let
val scn_net = safe_constraint_rules.get ctxt |> Tactic.build_net
val cn_net = constraint_rules.get ctxt |> Tactic.build_net
in
tac (scn_net,cn_net) ctxt
end
fun wrap_tac step_tac ctxt = REPEAT_ALL_NEW (
simp_constraints_tac ctxt
THEN_ALL_NEW unfold_abbrevs_tac ctxt
THEN_ALL_NEW step_tac ctxt
)
fun solve_step_tac (scn_net,cn_net) ctxt = REPEAT_ALL_NEW (
DETERM o resolve_from_net_tac ctxt scn_net
ORELSE' resolve_from_net_tac ctxt cn_net
)
fun safe_step_tac (scn_net,cn_net) ctxt = REPEAT_ALL_NEW (
DETERM o resolve_from_net_tac ctxt scn_net
ORELSE' SAFE' (resolve_from_net_tac ctxt cn_net)
)
fun solve_tac cn_nets ctxt = SOLVED' (wrap_tac (solve_step_tac cn_nets) ctxt)
fun safe_tac cn_nets ctxt =
simp_constraints_tac ctxt
THEN_ALL_NEW unfold_abbrevs_tac ctxt
THEN_ALL_NEW (solve_tac cn_nets ctxt ORELSE' TRY o wrap_tac (safe_step_tac cn_nets) ctxt)
in
val solve_constraint_tac = TRADE (fn ctxt =>
is_constraint_tac
THEN' resolve_tac ctxt @{thms CONSTRAINT_I}
THEN' WITH_RULE_NETS solve_tac ctxt)
val safe_constraint_tac = TRADE (fn ctxt =>
is_constraint_tac
THEN' resolve_tac ctxt @{thms CONSTRAINT_I}
THEN' WITH_RULE_NETS safe_tac ctxt
THEN_ALL_NEW fo_resolve_tac @{thms CONSTRAINT_D} ctxt)
val solve_constraint'_tac = TRADE (fn ctxt =>
TRY o resolve_tac ctxt @{thms CONSTRAINT_I}
THEN' WITH_RULE_NETS solve_tac ctxt)
val safe_constraint'_tac = TRADE (fn ctxt =>
TRY o resolve_tac ctxt @{thms CONSTRAINT_I}
THEN' WITH_RULE_NETS safe_tac ctxt)
end
fun constraint_tac ctxt =
safe_constraint_tac ctxt THEN_ALL_NEW slot_constraint_tac
fun process_constraint_slot ctxt = ON_SLOT (ALLGOALS (TRY o safe_constraint_tac ctxt))
fun solve_constraint_slot ctxt =
cond_focus is_constraint_goal
THEN ALLGOALS (
COND' is_slot_goal
ORELSE' (
solve_constraint_tac ctxt
ORELSE' TRY o safe_constraint_tac ctxt
)
)
THEN unfocus_ins
val setup = I
#> constraint_rules.setup
#> safe_constraint_rules.setup
end
›
setup Sepref_Constraints.setup
method_setup print_slot = ‹Scan.succeed (fn ctxt => SIMPLE_METHOD (Sepref_Constraints.print_slot_tac ctxt))›
method_setup solve_constraint = ‹Scan.succeed (fn ctxt => SIMPLE_METHOD' (Sepref_Constraints.solve_constraint'_tac ctxt))›
method_setup safe_constraint = ‹Scan.succeed (fn ctxt => SIMPLE_METHOD' (Sepref_Constraints.safe_constraint'_tac ctxt))›
end
Theory Sepref_Frame
section ‹Frame Inference›
theory Sepref_Frame
imports Sepref_Basic Sepref_Constraints
begin
text ‹ In this theory, we provide a specific frame inference tactic
for Sepref.
The first tactic, ‹frame_tac›, is a standard frame inference tactic,
based on the assumption that only @{const hn_ctxt}-assertions need to be
matched.
The second tactic, ‹merge_tac›, resolves entailments of the form
‹F1 ∨⇩A F2 ⟹⇩t ?F›
that occur during translation of if and case statements.
It synthesizes a new frame ?F, where refinements of variables
with equal refinements in ‹F1› and ‹F2› are preserved,
and the others are set to @{const hn_invalid}.
›
definition mismatch_assn :: "('a ⇒ 'c ⇒ assn) ⇒ ('a ⇒ 'c ⇒ assn) ⇒ 'a ⇒ 'c ⇒ assn"
where "mismatch_assn R1 R2 x y ≡ R1 x y ∨⇩A R2 x y"
abbreviation "hn_mismatch R1 R2 ≡ hn_ctxt (mismatch_assn R1 R2)"
lemma recover_pure_aux: "CONSTRAINT is_pure R ⟹ hn_invalid R x y ⟹⇩t hn_ctxt R x y"
by (auto simp: is_pure_conv invalid_pure_recover hn_ctxt_def)
lemma frame_thms:
"P ⟹⇩t P"
"P⟹⇩tP' ⟹ F⟹⇩tF' ⟹ F*P ⟹⇩t F'*P'"
"hn_ctxt R x y ⟹⇩t hn_invalid R x y"
"hn_ctxt R x y ⟹⇩t hn_ctxt (λ_ _. true) x y"
"CONSTRAINT is_pure R ⟹ hn_invalid R x y ⟹⇩t hn_ctxt R x y"
apply -
applyS simp
applyS (rule entt_star_mono; assumption)
subgoal
apply (simp add: hn_ctxt_def)
apply (rule enttI)
apply (rule ent_trans[OF invalidate[of R]])
by solve_entails
applyS (sep_auto simp: hn_ctxt_def)
applyS (erule recover_pure_aux)
done
named_theorems_rev sepref_frame_match_rules ‹Sepref: Additional frame rules›
text ‹Rules to discharge unmatched stuff›
lemma frame_rem1: "P⟹⇩tP" by simp
lemma frame_rem2: "F ⟹⇩t F' ⟹ F * hn_ctxt A x y ⟹⇩t F' * hn_ctxt A x y"
apply (rule entt_star_mono) by auto
lemma frame_rem3: "F ⟹⇩t F' ⟹ F * hn_ctxt A x y ⟹⇩t F'"
using frame_thms(2) by fastforce
lemma frame_rem4: "P ⟹⇩t emp" by simp
lemmas frame_rem_thms = frame_rem1 frame_rem2 frame_rem3 frame_rem4
named_theorems_rev sepref_frame_rem_rules
‹Sepref: Additional rules to resolve remainder of frame-pairing›
lemma ent_disj_star_mono:
"⟦ A ∨⇩A C ⟹⇩A E; B ∨⇩A D ⟹⇩A F ⟧ ⟹ A*B ∨⇩A C*D ⟹⇩A E*F"
by (metis ent_disjI1 ent_disjI2 ent_disjE ent_star_mono)
lemma entt_disj_star_mono:
"⟦ A ∨⇩A C ⟹⇩t E; B ∨⇩A D ⟹⇩t F ⟧ ⟹ A*B ∨⇩A C*D ⟹⇩t E*F"
proof -
assume a1: "A ∨⇩A C ⟹⇩t E"
assume "B ∨⇩A D ⟹⇩t F"
then have "A * B ∨⇩A C * D ⟹⇩A true * E * (true * F)"
using a1 by (simp add: ent_disj_star_mono enttD)
then show ?thesis
by (metis (no_types) assn_times_comm enttI merge_true_star_ctx star_aci(3))
qed
lemma hn_merge1:
"F ∨⇩A F ⟹⇩t F"
"⟦ hn_ctxt R1 x x' ∨⇩A hn_ctxt R2 x x' ⟹⇩t hn_ctxt R x x'; Fl ∨⇩A Fr ⟹⇩t F ⟧
⟹ Fl * hn_ctxt R1 x x' ∨⇩A Fr * hn_ctxt R2 x x' ⟹⇩t F * hn_ctxt R x x'"
apply simp
by (rule entt_disj_star_mono; simp)
lemma hn_merge2:
"hn_invalid R x x' ∨⇩A hn_ctxt R x x' ⟹⇩t hn_invalid R x x'"
"hn_ctxt R x x' ∨⇩A hn_invalid R x x' ⟹⇩t hn_invalid R x x'"
by (sep_auto eintros: invalidate ent_disjE intro!: ent_imp_entt simp: hn_ctxt_def)+
lemma invalid_assn_mono: "hn_ctxt A x y ⟹⇩t hn_ctxt B x y
⟹ hn_invalid A x y ⟹⇩t hn_invalid B x y"
by (clarsimp simp: invalid_assn_def entailst_def entails_def hn_ctxt_def)
(force simp: mod_star_conv)
lemma hn_merge3:
"⟦NO_MATCH (hn_invalid XX) R2; hn_ctxt R1 x x' ∨⇩A hn_ctxt R2 x x' ⟹⇩t hn_ctxt Rm x x'⟧ ⟹ hn_invalid R1 x x' ∨⇩A hn_ctxt R2 x x' ⟹⇩t hn_invalid Rm x x'"
"⟦NO_MATCH (hn_invalid XX) R1; hn_ctxt R1 x x' ∨⇩A hn_ctxt R2 x x' ⟹⇩t hn_ctxt Rm x x'⟧ ⟹ hn_ctxt R1 x x' ∨⇩A hn_invalid R2 x x' ⟹⇩t hn_invalid Rm x x'"
apply (meson entt_disjD1 entt_disjD2 entt_disjE entt_trans frame_thms(3) invalid_assn_mono)
apply (meson entt_disjD1 entt_disjD2 entt_disjE entt_trans frame_thms(3) invalid_assn_mono)
done
lemmas merge_thms = hn_merge1 hn_merge2
named_theorems sepref_frame_merge_rules ‹Sepref: Additional merge rules›
lemma hn_merge_mismatch: "hn_ctxt R1 x x' ∨⇩A hn_ctxt R2 x x' ⟹⇩t hn_mismatch R1 R2 x x'"
by (sep_auto simp: hn_ctxt_def mismatch_assn_def)
lemma is_merge: "P1∨⇩AP2⟹⇩tP ⟹ P1∨⇩AP2⟹⇩tP" .
lemma merge_mono: "⟦A⟹⇩tA'; B⟹⇩tB'; A'∨⇩AB' ⟹⇩t C⟧ ⟹ A∨⇩AB ⟹⇩t C"
by (meson entt_disjE entt_disjI1_direct entt_disjI2_direct entt_trans)
text ‹Apply forward rule on left or right side of merge›
lemma gen_merge_cons1: "⟦A⟹⇩tA'; A'∨⇩AB ⟹⇩t C⟧ ⟹ A∨⇩AB ⟹⇩t C"
by (meson merge_mono entt_refl)
lemma gen_merge_cons2: "⟦B⟹⇩tB'; A∨⇩AB' ⟹⇩t C⟧ ⟹ A∨⇩AB ⟹⇩t C"
by (meson merge_mono entt_refl)
lemmas gen_merge_cons = gen_merge_cons1 gen_merge_cons2
text ‹These rules are applied to recover pure values that have been destroyed by rule application›
definition "RECOVER_PURE P Q ≡ P ⟹⇩t Q"
lemma recover_pure:
"RECOVER_PURE emp emp"
"⟦RECOVER_PURE P2 Q2; RECOVER_PURE P1 Q1⟧ ⟹ RECOVER_PURE (P1*P2) (Q1*Q2)"
"CONSTRAINT is_pure R ⟹ RECOVER_PURE (hn_invalid R x y) (hn_ctxt R x y)"
"RECOVER_PURE (hn_ctxt R x y) (hn_ctxt R x y)"
unfolding RECOVER_PURE_def
subgoal by sep_auto
subgoal by (drule (1) entt_star_mono)
subgoal by (rule recover_pure_aux)
subgoal by sep_auto
done
lemma recover_pure_triv:
"RECOVER_PURE P P"
unfolding RECOVER_PURE_def by sep_auto
text ‹Weakening the postcondition by converting @{const invalid_assn} to @{term "λ_ _. true"}›
definition "WEAKEN_HNR_POST Γ Γ' Γ'' ≡ (∃h. h⊨Γ) ⟶ (Γ'' ⟹⇩t Γ')"
lemma weaken_hnr_postI:
assumes "WEAKEN_HNR_POST Γ Γ'' Γ'"
assumes "hn_refine Γ c Γ' R a"
shows "hn_refine Γ c Γ'' R a"
apply (rule hn_refine_preI)
apply (rule hn_refine_cons_post)
apply (rule assms)
using assms(1) unfolding WEAKEN_HNR_POST_def by blast
lemma weaken_hnr_post_triv: "WEAKEN_HNR_POST Γ P P"
unfolding WEAKEN_HNR_POST_def
by sep_auto
lemma weaken_hnr_post:
"⟦WEAKEN_HNR_POST Γ P P'; WEAKEN_HNR_POST Γ' Q Q'⟧ ⟹ WEAKEN_HNR_POST (Γ*Γ') (P*Q) (P'*Q')"
"WEAKEN_HNR_POST (hn_ctxt R x y) (hn_ctxt R x y) (hn_ctxt R x y)"
"WEAKEN_HNR_POST (hn_ctxt R x y) (hn_invalid R x y) (hn_ctxt (λ_ _. true) x y)"
proof (goal_cases)
case 1 thus ?case
unfolding WEAKEN_HNR_POST_def
apply clarsimp
apply (rule entt_star_mono)
by (auto simp: mod_star_conv)
next
case 2 thus ?case by (rule weaken_hnr_post_triv)
next
case 3 thus ?case
unfolding WEAKEN_HNR_POST_def
by (sep_auto simp: invalid_assn_def hn_ctxt_def)
qed
lemma reorder_enttI:
assumes "A*true = C*true"
assumes "B*true = D*true"
shows "(A⟹⇩tB) ≡ (C⟹⇩tD)"
apply (intro eq_reflection)
unfolding entt_def_true
by (simp add: assms)
lemma merge_sat1: "(A∨⇩AA' ⟹⇩t Am) ⟹ (A∨⇩AAm ⟹⇩t Am)"
using entt_disjD1 entt_disjE by blast
lemma merge_sat2: "(A∨⇩AA' ⟹⇩t Am) ⟹ (Am∨⇩AA' ⟹⇩t Am)"
using entt_disjD2 entt_disjE by blast
ML ‹
signature SEPREF_FRAME = sig
val is_merge: term -> bool
val frame_tac: (Proof.context -> tactic') -> Proof.context -> tactic'
val merge_tac: (Proof.context -> tactic') -> Proof.context -> tactic'
val frame_step_tac: (Proof.context -> tactic') -> bool -> Proof.context -> tactic'
val prepare_frame_tac : Proof.context -> tactic'
val recover_pure_tac: Proof.context -> tactic'
val align_goal_tac: Proof.context -> tactic'
val norm_goal_pre_tac: Proof.context -> tactic'
val align_rl_conv: Proof.context -> conv
val weaken_post_tac: Proof.context -> tactic'
val add_normrel_eq : thm -> Context.generic -> Context.generic
val del_normrel_eq : thm -> Context.generic -> Context.generic
val get_normrel_eqs : Proof.context -> thm list
val cfg_debug: bool Config.T
val setup: theory -> theory
end
structure Sepref_Frame : SEPREF_FRAME = struct
val cfg_debug =
Attrib.setup_config_bool @{binding sepref_debug_frame} (K false)
val DCONVERSION = Sepref_Debugging.DBG_CONVERSION cfg_debug
val dbg_msg_tac = Sepref_Debugging.dbg_msg_tac cfg_debug
structure normrel_eqs = Named_Thms (
val name = @{binding sepref_frame_normrel_eqs}
val description = "Equations to normalize relations for frame matching"
)
val add_normrel_eq = normrel_eqs.add_thm
val del_normrel_eq = normrel_eqs.del_thm
val get_normrel_eqs = normrel_eqs.get
val mk_entailst = HOLogic.mk_binrel @{const_name "entailst"}
local
open Sepref_Basic Refine_Util Conv
fun assn_ord p = case apply2 dest_hn_ctxt_opt p of
(NONE,NONE) => EQUAL
| (SOME _, NONE) => LESS
| (NONE, SOME _) => GREATER
| (SOME (_,a,_), SOME (_,a',_)) => Term_Ord.fast_term_ord (a,a')
in
fun reorder_ctxt_conv ctxt ct = let
val cert = Thm.cterm_of ctxt
val new_ct = Thm.term_of ct
|> strip_star
|> sort assn_ord
|> list_star
|> cert
val thm = Goal.prove_internal ctxt [] (mk_cequals (ct,new_ct))
(fn _ => simp_tac
(put_simpset HOL_basic_ss ctxt addsimps @{thms star_aci}) 1)
in
thm
end
fun prepare_fi_conv ctxt ct = case Thm.term_of ct of
@{mpat "?P ⟹⇩t ?Q"} => let
val cert = Thm.cterm_of ctxt
val (Qm, Qum) = strip_star Q |> filter_out is_true |> List.partition is_hn_ctxt
val Qtab = (
Qm |> map (fn x => (#2 (dest_hn_ctxt x),(NONE,x)))
|> Termtab.make
) handle
e as (Termtab.DUP _) => (
tracing ("Dup heap: " ^ @{make_string} ct); raise e)
val (Qtab,Pum) = fold (fn a => fn (Qtab,Pum) =>
case dest_hn_ctxt_opt a of
NONE => (Qtab,a::Pum)
| SOME (_,p,_) => ( case Termtab.lookup Qtab p of
SOME (NONE,tg) => (Termtab.update (p,(SOME a,tg)) Qtab, Pum)
| _ => (Qtab,a::Pum)
)
) (strip_star P) (Qtab,[])
val Pum = filter_out is_true Pum
val (pairs,Qum2) = Termtab.dest Qtab |> map #2
|> List.partition (is_some o #1)
|> apfst (map (apfst the))
|> apsnd (map #2)
val P' = mk_star (list_star (map fst pairs), list_star Pum)
val Q' = mk_star (list_star (map snd pairs), list_star (Qum2@Qum))
val new_ct = mk_entailst (P', Q') |> cert
val msg_tac = dbg_msg_tac (Sepref_Debugging.msg_allgoals "Solving frame permutation") ctxt 1
val tac = msg_tac THEN ALLGOALS (resolve_tac ctxt @{thms reorder_enttI}) THEN star_permute_tac ctxt
val thm = Goal.prove_internal ctxt [] (mk_cequals (ct,new_ct)) (fn _ => tac)
in
thm
end
| _ => no_conv ct
end
fun is_merge @{mpat "Trueprop (_ ∨⇩A _ ⟹⇩t _)"} = true | is_merge _ = false
fun is_gen_frame @{mpat "Trueprop (_ ⟹⇩t _)"} = true | is_gen_frame _ = false
fun prepare_frame_tac ctxt = let
open Refine_Util Conv
val frame_ss = put_simpset HOL_basic_ss ctxt addsimps
@{thms mult_1_right[where 'a=assn] mult_1_left[where 'a=assn]}
in
CONVERSION Thm.eta_conversion THEN'
simp_tac frame_ss THEN'
CONVERSION (HOL_concl_conv (fn _ => prepare_fi_conv ctxt) ctxt)
end
local
fun wrap_side_tac side_tac dbg tac = tac THEN_ALL_NEW_FWD (
CONCL_COND' is_gen_frame
ORELSE' (if dbg then TRY_SOLVED' else SOLVED') side_tac
)
in
fun frame_step_tac side_tac dbg ctxt = let
open Refine_Util Conv
val side_tac = Sepref_Constraints.constraint_tac ctxt ORELSE' side_tac ctxt
val frame_thms = @{thms frame_thms} @
Named_Theorems_Rev.get ctxt @{named_theorems_rev sepref_frame_match_rules}
val merge_thms = @{thms merge_thms} @
Named_Theorems.get ctxt @{named_theorems sepref_frame_merge_rules}
val ss = put_simpset HOL_basic_ss ctxt addsimps normrel_eqs.get ctxt
fun frame_thm_tac dbg = wrap_side_tac side_tac dbg (resolve_tac ctxt frame_thms)
fun merge_thm_tac dbg = wrap_side_tac side_tac dbg (resolve_tac ctxt merge_thms)
fun thm_tac dbg = CONCL_COND' is_merge THEN_ELSE' (merge_thm_tac dbg, frame_thm_tac dbg)
in
full_simp_tac ss THEN' thm_tac dbg
end
end
fun frame_loop_tac side_tac ctxt = let
in
TRY o (
REPEAT_ALL_NEW (DETERM o frame_step_tac side_tac false ctxt)
)
end
fun frame_tac side_tac ctxt = let
open Refine_Util Conv
val frame_rem_thms = @{thms frame_rem_thms}
@ Named_Theorems_Rev.get ctxt @{named_theorems_rev sepref_frame_rem_rules}
val solve_remainder_tac = TRY o REPEAT_ALL_NEW (DETERM o resolve_tac ctxt frame_rem_thms)
in
(prepare_frame_tac ctxt
THEN' resolve_tac ctxt @{thms ent_star_mono entt_star_mono})
THEN_ALL_NEW_LIST [
frame_loop_tac side_tac ctxt,
solve_remainder_tac
]
end
fun merge_tac side_tac ctxt = let
open Refine_Util Conv
val merge_conv = arg1_conv (binop_conv (reorder_ctxt_conv ctxt))
in
CONVERSION Thm.eta_conversion THEN'
CONCL_COND' is_merge THEN'
simp_tac (put_simpset HOL_basic_ss ctxt addsimps @{thms star_aci}) THEN'
CONVERSION (HOL_concl_conv (fn _ => merge_conv) ctxt) THEN'
frame_loop_tac side_tac ctxt
end
val setup = normrel_eqs.setup
local
open Sepref_Basic
fun is_invalid @{mpat "hn_invalid _ _ _ :: assn"} = true | is_invalid _ = false
fun contains_invalid @{mpat "Trueprop (RECOVER_PURE ?Q _)"} = exists is_invalid (strip_star Q)
| contains_invalid _ = false
in
fun recover_pure_tac ctxt =
CONCL_COND' contains_invalid THEN_ELSE' (
REPEAT_ALL_NEW (DETERM o (resolve_tac ctxt @{thms recover_pure} ORELSE' Sepref_Constraints.constraint_tac ctxt)),
resolve_tac ctxt @{thms recover_pure_triv}
)
end
local
open Sepref_Basic Refine_Util
datatype cte = Other of term | Hn of term * term * term
fun dest_ctxt_elem @{mpat "hn_ctxt ?R ?a ?c"} = Hn (R,a,c)
| dest_ctxt_elem t = Other t
fun mk_ctxt_elem (Other t) = t
| mk_ctxt_elem (Hn (R,a,c)) = @{mk_term "hn_ctxt ?R ?a ?c"}
fun match x (Hn (_,y,_)) = x aconv y
| match _ _ = false
fun dest_with_frame _ t = let
val (P,c,Q,R,a) = dest_hn_refine t
val (_,(_,args)) = dest_hnr_absfun a
val pre_ctes = strip_star P |> map dest_ctxt_elem
val (pre_args,frame) =
(case split_matching match args pre_ctes of
NONE => raise TERM("align_conv: Could not match all arguments",[P,a])
| SOME x => x)
in
((frame,pre_args),c,Q,R,a)
end
fun align_goal_conv_aux ctxt t = let
val ((frame,pre_args),c,Q,R,a) = dest_with_frame ctxt t
val P' = apply2 (list_star o map mk_ctxt_elem) (frame,pre_args) |> mk_star
val t' = mk_hn_refine (P',c,Q,R,a)
in t' end
fun align_rl_conv_aux ctxt t = let
val ((frame,pre_args),c,Q,R,a) = dest_with_frame ctxt t
val _ = frame = [] orelse raise TERM ("align_rl_conv: Extra preconditions in rule",[t,list_star (map mk_ctxt_elem frame)])
val P' = list_star (map mk_ctxt_elem pre_args)
val t' = mk_hn_refine (P',c,Q,R,a)
in t' end
fun normrel_conv ctxt = let
val ss = put_simpset HOL_basic_ss ctxt addsimps normrel_eqs.get ctxt
in
Simplifier.rewrite ss
end
in
fun align_goal_conv ctxt = f_tac_conv ctxt (align_goal_conv_aux ctxt) (star_permute_tac ctxt)
fun norm_goal_pre_conv ctxt = let
open Conv
val nr_conv = normrel_conv ctxt
in
HOL_concl_conv (fn _ => hn_refine_conv nr_conv all_conv all_conv all_conv all_conv) ctxt
end
fun norm_goal_pre_tac ctxt = CONVERSION (norm_goal_pre_conv ctxt)
fun align_rl_conv ctxt = let
open Conv
val nr_conv = normrel_conv ctxt
in
HOL_concl_conv (fn ctxt => f_tac_conv ctxt (align_rl_conv_aux ctxt) (star_permute_tac ctxt)) ctxt
then_conv HOL_concl_conv (K (hn_refine_conv nr_conv all_conv nr_conv nr_conv all_conv)) ctxt
end
fun align_goal_tac ctxt =
CONCL_COND' is_hn_refine_concl
THEN' DCONVERSION ctxt (HOL_concl_conv align_goal_conv ctxt)
end
fun weaken_post_tac ctxt = TRADE (fn ctxt =>
resolve_tac ctxt @{thms weaken_hnr_postI}
THEN' SOLVED' (REPEAT_ALL_NEW (DETERM o resolve_tac ctxt @{thms weaken_hnr_post weaken_hnr_post_triv}))
) ctxt
end
›
setup Sepref_Frame.setup
method_setup weaken_hnr_post = ‹Scan.succeed (fn ctxt => SIMPLE_METHOD' (Sepref_Frame.weaken_post_tac ctxt))›
‹Convert "hn_invalid" to "hn_ctxt (λ_ _. true)" in postcondition of hn_refine goal›
method extract_hnr_invalids = (
rule hn_refine_preI,
((drule mod_starD hn_invalidI | elim conjE exE)+)?
)
lemmas [sepref_frame_normrel_eqs] = the_pure_pure pure_the_pure
end
Theory Sepref_Rules
section ‹Refinement Rule Management›
theory Sepref_Rules
imports Sepref_Basic Sepref_Constraints
begin
text ‹This theory contains tools for managing the refinement rules used by Sepref›
text ‹The theories are based on uncurried functions, i.e.,
every function has type @{typ "'a⇒'b"}, where @{typ 'a} is the
tuple of parameters, or unit if there are none.
›
subsection ‹Assertion Interface Binding›
text ‹Binding of interface types to refinement assertions›
definition intf_of_assn :: "('a ⇒ _ ⇒ assn) ⇒ 'b itself ⇒ bool" where
[simp]: "intf_of_assn a b = True"
lemma intf_of_assnI: "intf_of_assn R TYPE('a)" by simp
named_theorems_rev intf_of_assn ‹Links between refinement assertions and interface types›
lemma intf_of_assn_fallback: "intf_of_assn (R :: 'a ⇒ _ ⇒ assn) TYPE('a)" by simp
subsection ‹Function Refinement with Precondition›
definition fref :: "('c ⇒ bool) ⇒ ('a × 'c) set ⇒ ('b × 'd) set
⇒ (('a ⇒ 'b) × ('c ⇒ 'd)) set"
("[_]⇩f _ → _" [0,60,60] 60)
where "[P]⇩f R → S ≡ {(f,g). ∀x y. P y ∧ (x,y)∈R ⟶ (f x, g y)∈S}"
abbreviation freft ("_ →⇩f _" [60,60] 60) where "R →⇩f S ≡ ([λ_. True]⇩f R → S)"
lemma rel2p_fref[rel2p]: "rel2p (fref P R S)
= (λf g. (∀x y. P y ⟶ rel2p R x y ⟶ rel2p S (f x) (g y)))"
by (auto simp: fref_def rel2p_def[abs_def])
lemma fref_cons:
assumes "(f,g) ∈ [P]⇩f R → S"
assumes "⋀c a. (c,a)∈R' ⟹ Q a ⟹ P a"
assumes "R' ⊆ R"
assumes "S ⊆ S'"
shows "(f,g) ∈ [Q]⇩f R' → S'"
using assms
unfolding fref_def
by fastforce
lemmas fref_cons' = fref_cons[OF _ _ order_refl order_refl]
lemma frefI[intro?]:
assumes "⋀x y. ⟦P y; (x,y)∈R⟧ ⟹ (f x, g y)∈S"
shows "(f,g)∈fref P R S"
using assms
unfolding fref_def
by auto
lemma fref_ncI: "(f,g)∈R→S ⟹ (f,g)∈R→⇩fS"
apply (rule frefI)
apply parametricity
done
lemma frefD:
assumes "(f,g)∈fref P R S"
shows "⟦P y; (x,y)∈R⟧ ⟹ (f x, g y)∈S"
using assms
unfolding fref_def
by auto
lemma fref_ncD: "(f,g)∈R→⇩fS ⟹ (f,g)∈R→S"
apply (rule fun_relI)
apply (drule frefD)
apply simp
apply assumption+
done
lemma fref_compI:
"fref P R1 R2 O fref Q S1 S2 ⊆
fref (λx. Q x ∧ (∀y. (y,x)∈S1 ⟶ P y)) (R1 O S1) (R2 O S2)"
unfolding fref_def
apply (auto)
apply blast
done
lemma fref_compI':
"⟦ (f,g)∈fref P R1 R2; (g,h)∈fref Q S1 S2 ⟧
⟹ (f,h) ∈ fref (λx. Q x ∧ (∀y. (y,x)∈S1 ⟶ P y)) (R1 O S1) (R2 O S2)"
using fref_compI[of P R1 R2 Q S1 S2]
by auto
lemma fref_unit_conv:
"(λ_. c, λ_. a) ∈ fref P unit_rel S ⟷ (P () ⟶ (c,a)∈S)"
by (auto simp: fref_def)
lemma fref_uncurry_conv:
"(uncurry c, uncurry a) ∈ fref P (R1×⇩rR2) S
⟷ (∀x1 y1 x2 y2. P (y1,y2) ⟶ (x1,y1)∈R1 ⟶ (x2,y2)∈R2 ⟶ (c x1 x2, a y1 y2) ∈ S)"
by (auto simp: fref_def)
lemma fref_mono: "⟦ ⋀x. P' x ⟹ P x; R' ⊆ R; S ⊆ S' ⟧
⟹ fref P R S ⊆ fref P' R' S'"
unfolding fref_def
by auto blast
lemma fref_composeI:
assumes FR1: "(f,g)∈fref P R1 R2"
assumes FR2: "(g,h)∈fref Q S1 S2"
assumes C1: "⋀x. P' x ⟹ Q x"
assumes C2: "⋀x y. ⟦P' x; (y,x)∈S1⟧ ⟹ P y"
assumes R1: "R' ⊆ R1 O S1"
assumes R2: "R2 O S2 ⊆ S'"
assumes FH: "f'=f" "h'=h"
shows "(f',h') ∈ fref P' R' S'"
unfolding FH
apply (rule subsetD[OF fref_mono fref_compI'[OF FR1 FR2]])
using C1 C2 apply blast
using R1 apply blast
using R2 apply blast
done
lemma fref_triv: "A⊆Id ⟹ (f,f)∈[P]⇩f A → Id"
by (auto simp: fref_def)
subsection ‹Heap-Function Refinement›
text ‹
The following relates a heap-function with a pure function.
It contains a precondition, a refinement assertion for the arguments
before and after execution, and a refinement relation for the result.
›
definition hfref
:: "
('a ⇒ bool)
⇒ (('a ⇒ 'ai ⇒ assn) × ('a ⇒ 'ai ⇒ assn))
⇒ ('b ⇒ 'bi ⇒ assn)
⇒ (('ai ⇒ 'bi Heap) × ('a⇒'b nres)) set"
("[_]⇩a _ → _" [0,60,60] 60)
where
"[P]⇩a RS → T ≡ { (f,g) . ∀c a. P a ⟶ hn_refine (fst RS a c) (f c) (snd RS a c) T (g a)}"
abbreviation hfreft ("_ →⇩a _" [60,60] 60) where "RS →⇩a T ≡ ([λ_. True]⇩a RS → T)"
lemma hfrefI[intro?]:
assumes "⋀c a. P a ⟹ hn_refine (fst RS a c) (f c) (snd RS a c) T (g a)"
shows "(f,g)∈hfref P RS T"
using assms unfolding hfref_def by blast
lemma hfrefD:
assumes "(f,g)∈hfref P RS T"
shows "⋀c a. P a ⟹ hn_refine (fst RS a c) (f c) (snd RS a c) T (g a)"
using assms unfolding hfref_def by blast
lemma hfref_to_ASSERT_conv:
"NO_MATCH (λ_. True) P ⟹ (a,b)∈[P]⇩a R → S ⟷ (a,λx. ASSERT (P x) ⪢ b x) ∈ R →⇩a S"
unfolding hfref_def
apply (clarsimp; safe; clarsimp?)
apply (rule hn_refine_nofailI)
apply (simp add: refine_pw_simps)
subgoal for xc xa
apply (drule spec[of _ xc])
apply (drule spec[of _ xa])
by simp
done
text ‹
A pair of argument refinement assertions can be created by the
input assertion and the information whether the parameter is kept or destroyed
by the function.
›
primrec hf_pres
:: "('a ⇒ 'b ⇒ assn) ⇒ bool ⇒ ('a ⇒ 'b ⇒ assn)×('a ⇒ 'b ⇒ assn)"
where
"hf_pres R True = (R,R)" | "hf_pres R False = (R,invalid_assn R)"
abbreviation hfkeep
:: "('a ⇒ 'b ⇒ assn) ⇒ ('a ⇒ 'b ⇒ assn)×('a ⇒ 'b ⇒ assn)"
("(_⇧k)" [1000] 999)
where "R⇧k ≡ hf_pres R True"
abbreviation hfdrop
:: "('a ⇒ 'b ⇒ assn) ⇒ ('a ⇒ 'b ⇒ assn)×('a ⇒ 'b ⇒ assn)"
("(_⇧d)" [1000] 999)
where "R⇧d ≡ hf_pres R False"
abbreviation "hn_kede R kd ≡ hn_ctxt (snd (hf_pres R kd))"
abbreviation "hn_keep R ≡ hn_kede R True"
abbreviation "hn_dest R ≡ hn_kede R False"
lemma keep_drop_sels[simp]:
"fst (R⇧k) = R"
"snd (R⇧k) = R"
"fst (R⇧d) = R"
"snd (R⇧d) = invalid_assn R"
by auto
lemma hf_pres_fst[simp]: "fst (hf_pres R k) = R" by (cases k) auto
text ‹
The following operator combines multiple argument assertion-pairs to
argument assertion-pairs for the product. It is required to state
argument assertion-pairs for uncurried functions.
›
definition hfprod :: "
(('a ⇒ 'b ⇒ assn)×('a ⇒ 'b ⇒ assn))
⇒ (('c ⇒ 'd ⇒ assn)×('c ⇒ 'd ⇒ assn))
⇒ ((('a×'c) ⇒ ('b × 'd) ⇒ assn) × (('a×'c) ⇒ ('b × 'd) ⇒ assn))"
(infixl "*⇩a" 65)
where "RR *⇩a SS ≡ (prod_assn (fst RR) (fst SS), prod_assn (snd RR) (snd SS))"
lemma hfprod_fst_snd[simp]:
"fst (A *⇩a B) = prod_assn (fst A) (fst B)"
"snd (A *⇩a B) = prod_assn (snd A) (snd B)"
unfolding hfprod_def by auto
subsubsection ‹Conversion from fref to hfref›
lemma fref_to_pure_hfref':
assumes "(f,g) ∈ [P]⇩f R→⟨S⟩nres_rel"
assumes "⋀x. x∈Domain R ∩ R¯``Collect P ⟹ f x = RETURN (f' x)"
shows "(return o f', g) ∈ [P]⇩a (pure R)⇧k→pure S"
apply (rule hfrefI) apply (rule hn_refineI)
using assms
apply ((sep_auto simp: fref_def pure_def pw_le_iff pw_nres_rel_iff
refine_pw_simps eintros del: exI))
apply force
done
subsubsection ‹Conversion from hfref to hnr›
text ‹This section contains the lemmas. The ML code is further down. ›
lemma hf2hnr:
assumes "(f,g) ∈ [P]⇩a R → S"
shows "∀x xi. P x ⟶ hn_refine (emp * hn_ctxt (fst R) x xi) (f$xi) (emp * hn_ctxt (snd R) x xi) S (g$x)"
using assms
unfolding hfref_def
by (auto simp: hn_ctxt_def)
definition [simp]: "to_hnr_prod ≡ prod_assn"
lemma to_hnr_prod_fst_snd:
"fst (A *⇩a B) = to_hnr_prod (fst A) (fst B)"
"snd (A *⇩a B) = to_hnr_prod (snd A) (snd B)"
unfolding hfprod_def by auto
lemma hnr_uncurry_unfold: "
(∀x xi. P x ⟶
hn_refine
(Γ * hn_ctxt (to_hnr_prod A B) x xi)
(fi xi)
(Γ' * hn_ctxt (to_hnr_prod A' B') x xi)
R
(f x))
⟷ (∀b bi a ai. P (a,b) ⟶
hn_refine
(Γ * hn_ctxt B b bi * hn_ctxt A a ai)
(fi (ai,bi))
(Γ' * hn_ctxt B' b bi * hn_ctxt A' a ai)
R
(f (a,b))
)"
by (auto simp: hn_ctxt_def prod_assn_def star_aci)
lemma hnr_intro_dummy:
"∀x xi. P x ⟶ hn_refine (Γ x xi) (c xi) (Γ' x xi) R (a x) ⟹ ∀x xi. P x ⟶ hn_refine (emp*Γ x xi) (c xi) (emp*Γ' x xi) R (a x)"
by simp
lemma hn_ctxt_ctxt_fix_conv: "hn_ctxt (hn_ctxt R) = hn_ctxt R"
by (simp add: hn_ctxt_def[abs_def])
lemma uncurry_APP: "uncurry f$(a,b) = f$a$b" by auto
lemma norm_RETURN_o:
"⋀f. (RETURN o f)$x = (RETURN$(f$x))"
"⋀f. (RETURN oo f)$x$y = (RETURN$(f$x$y))"
"⋀f. (RETURN ooo f)$x$y$z = (RETURN$(f$x$y$z))"
"⋀f. (λx. RETURN ooo f x)$x$y$z$a = (RETURN$(f$x$y$z$a))"
"⋀f. (λx y. RETURN ooo f x y)$x$y$z$a$b = (RETURN$(f$x$y$z$a$b))"
by auto
lemma norm_return_o:
"⋀f. (return o f)$x = (return$(f$x))"
"⋀f. (return oo f)$x$y = (return$(f$x$y))"
"⋀f. (return ooo f)$x$y$z = (return$(f$x$y$z))"
"⋀f. (λx. return ooo f x)$x$y$z$a = (return$(f$x$y$z$a))"
"⋀f. (λx y. return ooo f x y)$x$y$z$a$b = (return$(f$x$y$z$a$b))"
by auto
lemma hn_val_unit_conv_emp[simp]: "hn_val unit_rel x y = emp"
by (auto simp: hn_ctxt_def pure_def)
subsubsection ‹Conversion from hnr to hfref›
text ‹This section contains the lemmas. The ML code is further down. ›
abbreviation "id_assn ≡ pure Id"
abbreviation "unit_assn ≡ id_assn :: unit ⇒ _"
lemma pure_unit_rel_eq_empty: "unit_assn x y = emp"
by (auto simp: pure_def)
lemma uc_hfprod_sel:
"fst (A *⇩a B) a c = (case (a,c) of ((a1,a2),(c1,c2)) ⇒ fst A a1 c1 * fst B a2 c2)"
"snd (A *⇩a B) a c = (case (a,c) of ((a1,a2),(c1,c2)) ⇒ snd A a1 c1 * snd B a2 c2)"
unfolding hfprod_def prod_assn_def[abs_def] by auto
subsubsection ‹Conversion from relation to fref›
text ‹This section contains the lemmas. The ML code is further down. ›
definition "CURRY R ≡ { (f,g). (uncurry f, uncurry g) ∈ R }"
lemma fref_param1: "R→S = fref (λ_. True) R S"
by (auto simp: fref_def fun_relD)
lemma fref_nest: "fref P1 R1 (fref P2 R2 S)
≡ CURRY (fref (λ(a,b). P1 a ∧ P2 b) (R1×⇩rR2) S)"
apply (rule eq_reflection)
by (auto simp: fref_def CURRY_def)
lemma in_CURRY_conv: "(f,g) ∈ CURRY R ⟷ (uncurry f, uncurry g) ∈ R"
unfolding CURRY_def by auto
lemma uncurry0_APP[simp]: "uncurry0 c $ x = c" by auto
lemma fref_param0I: "(c,a)∈R ⟹ (uncurry0 c, uncurry0 a) ∈ fref (λ_. True) unit_rel R"
by (auto simp: fref_def)
subsubsection ‹Composition›
definition hr_comp :: "('b ⇒ 'c ⇒ assn) ⇒ ('b × 'a) set ⇒ 'a ⇒ 'c ⇒ assn"
where "hr_comp R1 R2 a c ≡ ∃⇩Ab. R1 b c * ↑((b,a)∈R2)"
definition hrp_comp
:: "('d ⇒ 'b ⇒ assn) × ('d ⇒ 'c ⇒ assn)
⇒ ('d × 'a) set ⇒ ('a ⇒ 'b ⇒ assn) × ('a ⇒ 'c ⇒ assn)"
where "hrp_comp RR' S ≡ (hr_comp (fst RR') S, hr_comp (snd RR') S) "
lemma hr_compI: "(b,a)∈R2 ⟹ R1 b c ⟹⇩A hr_comp R1 R2 a c"
unfolding hr_comp_def
by sep_auto
lemma hr_comp_Id1[simp]: "hr_comp (pure Id) R = pure R"
unfolding hr_comp_def[abs_def] pure_def
apply (intro ext ent_iffI)
by sep_auto+
lemma hr_comp_Id2[simp]: "hr_comp R Id = R"
unfolding hr_comp_def[abs_def]
apply (intro ext ent_iffI)
by sep_auto+
lemma hr_comp_emp[simp]: "hr_comp (λa c. emp) R a c = ↑(∃b. (b,a)∈R)"
unfolding hr_comp_def[abs_def]
apply (intro ext ent_iffI)
apply sep_auto+
done
lemma hr_comp_prod_conv[simp]:
"hr_comp (prod_assn Ra Rb) (Ra' ×⇩r Rb')
= prod_assn (hr_comp Ra Ra') (hr_comp Rb Rb')"
unfolding hr_comp_def[abs_def] prod_assn_def[abs_def]
apply (intro ext ent_iffI)
apply solve_entails apply clarsimp apply sep_auto
apply clarsimp apply (intro ent_ex_preI)
apply (rule ent_ex_postI) apply (sep_auto split: prod.splits)
done
lemma hr_comp_pure: "hr_comp (pure R) S = pure (R O S)"
apply (intro ext)
apply (rule ent_iffI)
unfolding hr_comp_def[abs_def]
apply (sep_auto simp: pure_def)+
done
lemma hr_comp_is_pure[safe_constraint_rules]: "is_pure A ⟹ is_pure (hr_comp A B)"
by (auto simp: hr_comp_pure is_pure_conv)
lemma hr_comp_the_pure: "is_pure A ⟹ the_pure (hr_comp A B) = the_pure A O B"
unfolding is_pure_conv
by (clarsimp simp: hr_comp_pure)
lemma rdomp_hrcomp_conv: "rdomp (hr_comp A R) x ⟷ (∃y. rdomp A y ∧ (y,x)∈R)"
by (auto simp: rdomp_def hr_comp_def)
lemma hn_rel_compI:
"⟦nofail a; (b,a)∈⟨R2⟩nres_rel⟧ ⟹ hn_rel R1 b c ⟹⇩A hn_rel (hr_comp R1 R2) a c"
unfolding hr_comp_def hn_rel_def nres_rel_def
apply (clarsimp intro!: ent_ex_preI)
apply (drule (1) order_trans)
apply (simp add: ret_le_down_conv)
by sep_auto
lemma hr_comp_precise[constraint_rules]:
assumes [safe_constraint_rules]: "precise R"
assumes SV: "single_valued S"
shows "precise (hr_comp R S)"
apply (rule preciseI)
unfolding hr_comp_def
apply clarsimp
by (metis SV assms(1) preciseD single_valuedD)
lemma hr_comp_assoc: "hr_comp (hr_comp R S) T = hr_comp R (S O T)"
apply (intro ext)
unfolding hr_comp_def
apply (rule ent_iffI; clarsimp)
apply sep_auto
apply (rule ent_ex_preI; clarsimp)
apply sep_auto
done
lemma hnr_comp:
assumes R: "⋀b1 c1. P b1 ⟹ hn_refine (R1 b1 c1 * Γ) (c c1) (R1p b1 c1 * Γ') R (b b1)"
assumes S: "⋀a1 b1. ⟦Q a1; (b1,a1)∈R1'⟧ ⟹ (b b1,a a1)∈⟨R'⟩nres_rel"
assumes PQ: "⋀a1 b1. ⟦Q a1; (b1,a1)∈R1'⟧ ⟹ P b1"
assumes Q: "Q a1"
shows "hn_refine
(hr_comp R1 R1' a1 c1 * Γ)
(c c1)
(hr_comp R1p R1' a1 c1 * Γ')
(hr_comp R R')
(a a1)"
unfolding hn_refine_alt
proof clarsimp
assume NF: "nofail (a a1)"
show "
<hr_comp R1 R1' a1 c1 * Γ>
c c1
<λr. hn_rel (hr_comp R R') (a a1) r * (hr_comp R1p R1' a1 c1 * Γ')>⇩t"
apply (subst hr_comp_def)
apply (clarsimp intro!: norm_pre_ex_rule)
proof -
fix b1
assume R1: "(b1, a1) ∈ R1'"
from S R1 Q have R': "(b b1, a a1) ∈ ⟨R'⟩nres_rel" by blast
with NF have NFB: "nofail (b b1)"
by (simp add: nres_rel_def pw_le_iff refine_pw_simps)
from PQ R1 Q have P: "P b1" by blast
with NFB R have "<R1 b1 c1 * Γ> c c1 <λr. hn_rel R (b b1) r * (R1p b1 c1 * Γ')>⇩t"
unfolding hn_refine_alt by auto
thus "<R1 b1 c1 * Γ>
c c1
<λr. hn_rel (hr_comp R R') (a a1) r * (hr_comp R1p R1' a1 c1 * Γ')>⇩t"
apply (rule cons_post_rule)
apply (solve_entails)
by (intro ent_star_mono hn_rel_compI[OF NF R'] hr_compI[OF R1] ent_refl)
qed
qed
lemma hnr_comp1_aux:
assumes R: "⋀b1 c1. P b1 ⟹ hn_refine (hn_ctxt R1 b1 c1) (c c1) (hn_ctxt R1p b1 c1) R (b$b1)"
assumes S: "⋀a1 b1. ⟦Q a1; (b1,a1)∈R1'⟧ ⟹ (b$b1,a$a1)∈⟨R'⟩nres_rel"
assumes PQ: "⋀a1 b1. ⟦Q a1; (b1,a1)∈R1'⟧ ⟹ P b1"
assumes Q: "Q a1"
shows "hn_refine
(hr_comp R1 R1' a1 c1)
(c c1)
(hr_comp R1p R1' a1 c1)
(hr_comp R R')
(a a1)"
using assms hnr_comp[where Γ=emp and Γ'=emp and a=a and b=b and c=c and P=P and Q=Q]
unfolding hn_ctxt_def
by auto
lemma hfcomp:
assumes A: "(f,g) ∈ [P]⇩a RR' → S"
assumes B: "(g,h) ∈ [Q]⇩f T → ⟨U⟩nres_rel"
shows "(f,h) ∈ [λa. Q a ∧ (∀a'. (a',a)∈T ⟶ P a')]⇩a
hrp_comp RR' T → hr_comp S U"
using assms
unfolding fref_def hfref_def hrp_comp_def
apply clarsimp
apply (rule hnr_comp1_aux[of
P "fst RR'" f "snd RR'" S g "λa. Q a ∧ (∀a'. (a',a)∈T ⟶ P a')" T h U])
apply (auto simp: hn_ctxt_def)
done
lemma hfref_weaken_pre_nofail:
assumes "(f,g) ∈ [P]⇩a R → S"
shows "(f,g) ∈ [λx. nofail (g x) ⟶ P x]⇩a R → S"
using assms
unfolding hfref_def hn_refine_def
by auto
lemma hfref_cons:
assumes "(f,g) ∈ [P]⇩a R → S"
assumes "⋀x. P' x ⟹ P x"
assumes "⋀x y. fst R' x y ⟹⇩t fst R x y"
assumes "⋀x y. snd R x y ⟹⇩t snd R' x y"
assumes "⋀x y. S x y ⟹⇩t S' x y"
shows "(f,g) ∈ [P']⇩a R' → S'"
unfolding hfref_def
apply clarsimp
apply (rule hn_refine_cons)
apply (rule assms(3))
defer
apply (rule entt_trans[OF assms(4)]; sep_auto)
apply (rule assms(5))
apply (frule assms(2))
using assms(1)
unfolding hfref_def
apply auto
done
subsubsection ‹Composition Automation›
text ‹This section contains the lemmas. The ML code is further down. ›
lemma prod_hrp_comp:
"hrp_comp (A *⇩a B) (C ×⇩r D) = hrp_comp A C *⇩a hrp_comp B D"
unfolding hrp_comp_def hfprod_def by simp
lemma hrp_comp_keep: "hrp_comp (A⇧k) B = (hr_comp A B)⇧k"
by (auto simp: hrp_comp_def)
lemma hr_comp_invalid: "hr_comp (invalid_assn R1) R2 = invalid_assn (hr_comp R1 R2)"
apply (intro ent_iffI entailsI ext)
unfolding invalid_assn_def hr_comp_def
by auto
lemma hrp_comp_dest: "hrp_comp (A⇧d) B = (hr_comp A B)⇧d"
by (auto simp: hrp_comp_def hr_comp_invalid)
definition "hrp_imp RR RR' ≡
∀a b. (fst RR' a b ⟹⇩t fst RR a b) ∧ (snd RR a b ⟹⇩t snd RR' a b)"
lemma hfref_imp: "hrp_imp RR RR' ⟹ [P]⇩a RR → S ⊆ [P]⇩a RR' → S"
apply clarsimp
apply (erule hfref_cons)
apply (simp_all add: hrp_imp_def)
done
lemma hrp_imp_refl: "hrp_imp RR RR"
unfolding hrp_imp_def by auto
lemma hrp_imp_reflI: "RR = RR' ⟹ hrp_imp RR RR'"
unfolding hrp_imp_def by auto
lemma hrp_comp_cong: "hrp_imp A A' ⟹ B=B' ⟹ hrp_imp (hrp_comp A B) (hrp_comp A' B')"
by (sep_auto simp: hrp_imp_def hrp_comp_def hr_comp_def entailst_def)
lemma hrp_prod_cong: "hrp_imp A A' ⟹ hrp_imp B B' ⟹ hrp_imp (A*⇩aB) (A'*⇩aB')"
by (sep_auto simp: hrp_imp_def prod_assn_def intro: entt_star_mono)
lemma hrp_imp_trans: "hrp_imp A B ⟹ hrp_imp B C ⟹ hrp_imp A C"
unfolding hrp_imp_def
by (fastforce intro: entt_trans)
lemma fcomp_norm_dflt_init: "x∈[P]⇩a R → T ⟹ hrp_imp R S ⟹ x∈[P]⇩a S → T"
apply (erule rev_subsetD)
by (rule hfref_imp)
definition "comp_PRE R P Q S ≡ λx. S x ⟶ (P x ∧ (∀y. (y,x)∈R ⟶ Q x y))"
lemma comp_PRE_cong[cong]:
assumes "R≡R'"
assumes "⋀x. P x ≡ P' x"
assumes "⋀x. S x ≡ S' x"
assumes "⋀x y. ⟦P x; (y,x)∈R; y∈Domain R; S' x ⟧ ⟹ Q x y ≡ Q' x y"
shows "comp_PRE R P Q S ≡ comp_PRE R' P' Q' S'"
using assms
by (fastforce simp: comp_PRE_def intro!: eq_reflection ext)
lemma fref_compI_PRE:
"⟦ (f,g)∈fref P R1 R2; (g,h)∈fref Q S1 S2 ⟧
⟹ (f,h) ∈ fref (comp_PRE S1 Q (λ_. P) (λ_. True)) (R1 O S1) (R2 O S2)"
using fref_compI[of P R1 R2 Q S1 S2]
unfolding comp_PRE_def
by auto
lemma PRE_D1: "(Q x ∧ P x) ⟶ comp_PRE S1 Q (λx _. P x) S x"
by (auto simp: comp_PRE_def)
lemma PRE_D2: "(Q x ∧ (∀y. (y,x)∈S1 ⟶ S x ⟶ P x y)) ⟶ comp_PRE S1 Q P S x"
by (auto simp: comp_PRE_def)
lemma fref_weaken_pre:
assumes "⋀x. P x ⟶ P' x"
assumes "(f,h) ∈ fref P' R S"
shows "(f,h) ∈ fref P R S"
apply (rule rev_subsetD[OF assms(2) fref_mono])
using assms(1) by auto
lemma fref_PRE_D1:
assumes "(f,h) ∈ fref (comp_PRE S1 Q (λx _. P x) X) R S"
shows "(f,h) ∈ fref (λx. Q x ∧ P x) R S"
by (rule fref_weaken_pre[OF PRE_D1 assms])
lemma fref_PRE_D2:
assumes "(f,h) ∈ fref (comp_PRE S1 Q P X) R S"
shows "(f,h) ∈ fref (λx. Q x ∧ (∀y. (y,x)∈S1 ⟶ X x ⟶ P x y)) R S"
by (rule fref_weaken_pre[OF PRE_D2 assms])
lemmas fref_PRE_D = fref_PRE_D1 fref_PRE_D2
lemma hfref_weaken_pre:
assumes "⋀x. P x ⟶ P' x"
assumes "(f,h) ∈ hfref P' R S"
shows "(f,h) ∈ hfref P R S"
using assms
by (auto simp: hfref_def)
lemma hfref_weaken_pre':
assumes "⋀x. ⟦P x; rdomp (fst R) x⟧ ⟹ P' x"
assumes "(f,h) ∈ hfref P' R S"
shows "(f,h) ∈ hfref P R S"
apply (rule hfrefI)
apply (rule hn_refine_preI)
using assms
by (auto simp: hfref_def rdomp_def)
lemma hfref_weaken_pre_nofail':
assumes "(f,g) ∈ [P]⇩a R → S"
assumes "⋀x. ⟦nofail (g x); Q x⟧ ⟹ P x"
shows "(f,g) ∈ [Q]⇩a R → S"
apply (rule hfref_weaken_pre[OF _ assms(1)[THEN hfref_weaken_pre_nofail]])
using assms(2)
by blast
lemma hfref_compI_PRE_aux:
assumes A: "(f,g) ∈ [P]⇩a RR' → S"
assumes B: "(g,h) ∈ [Q]⇩f T → ⟨U⟩nres_rel"
shows "(f,h) ∈ [comp_PRE T Q (λ_. P) (λ_. True)]⇩a
hrp_comp RR' T → hr_comp S U"
apply (rule hfref_weaken_pre[OF _ hfcomp[OF A B]])
by (auto simp: comp_PRE_def)
lemma hfref_compI_PRE:
assumes A: "(f,g) ∈ [P]⇩a RR' → S"
assumes B: "(g,h) ∈ [Q]⇩f T → ⟨U⟩nres_rel"
shows "(f,h) ∈ [comp_PRE T Q (λx y. P y) (λx. nofail (h x))]⇩a
hrp_comp RR' T → hr_comp S U"
using hfref_compI_PRE_aux[OF A B, THEN hfref_weaken_pre_nofail]
apply (rule hfref_weaken_pre[rotated])
apply (auto simp: comp_PRE_def)
done
lemma hfref_PRE_D1:
assumes "(f,h) ∈ hfref (comp_PRE S1 Q (λx _. P x) X) R S"
shows "(f,h) ∈ hfref (λx. Q x ∧ P x) R S"
by (rule hfref_weaken_pre[OF PRE_D1 assms])
lemma hfref_PRE_D2:
assumes "(f,h) ∈ hfref (comp_PRE S1 Q P X) R S"
shows "(f,h) ∈ hfref (λx. Q x ∧ (∀y. (y,x)∈S1 ⟶ X x ⟶ P x y)) R S"
by (rule hfref_weaken_pre[OF PRE_D2 assms])
lemma hfref_PRE_D3:
assumes "(f,h) ∈ hfref (comp_PRE S1 Q P X) R S"
shows "(f,h) ∈ hfref (comp_PRE S1 Q P X) R S"
using assms .
lemmas hfref_PRE_D = hfref_PRE_D1 hfref_PRE_D3
subsection ‹Automation›
text ‹Purity configuration for constraint solver›
lemmas [safe_constraint_rules] = pure_pure
text ‹Configuration for hfref to hnr conversion›
named_theorems to_hnr_post ‹to_hnr converter: Postprocessing unfold rules›
lemma uncurry0_add_app_tag: "uncurry0 (RETURN c) = uncurry0 (RETURN$c)" by simp
lemmas [to_hnr_post] = norm_RETURN_o norm_return_o
uncurry0_add_app_tag uncurry0_apply uncurry0_APP hn_val_unit_conv_emp
mult_1[of "x::assn" for x] mult_1_right[of "x::assn" for x]
named_theorems to_hfref_post ‹to_hfref converter: Postprocessing unfold rules›
lemma prod_casesK[to_hfref_post]: "case_prod (λ_ _. k) = (λ_. k)" by auto
lemma uncurry0_hfref_post[to_hfref_post]: "hfref (uncurry0 True) R S = hfref (λ_. True) R S"
apply (fo_rule arg_cong fun_cong)+ by auto
text ‹Configuration for relation normalization after composition›
named_theorems fcomp_norm_unfold ‹fcomp-normalizer: Unfold theorems›
named_theorems fcomp_norm_simps ‹fcomp-normalizer: Simplification theorems›
named_theorems fcomp_norm_init "fcomp-normalizer: Initialization rules"
named_theorems fcomp_norm_trans "fcomp-normalizer: Transitivity rules"
named_theorems fcomp_norm_cong "fcomp-normalizer: Congruence rules"
named_theorems fcomp_norm_norm "fcomp-normalizer: Normalization rules"
named_theorems fcomp_norm_refl "fcomp-normalizer: Reflexivity rules"
text ‹Default Setup›
lemmas [fcomp_norm_unfold] = prod_rel_comp nres_rel_comp Id_O_R R_O_Id
lemmas [fcomp_norm_unfold] = hr_comp_Id1 hr_comp_Id2
lemmas [fcomp_norm_unfold] = hr_comp_prod_conv
lemmas [fcomp_norm_unfold] = prod_hrp_comp hrp_comp_keep hrp_comp_dest hr_comp_pure
lemma [fcomp_norm_simps]: "CONSTRAINT is_pure P ⟹ pure (the_pure P) = P" by simp
lemmas [fcomp_norm_simps] = True_implies_equals
lemmas [fcomp_norm_init] = fcomp_norm_dflt_init
lemmas [fcomp_norm_trans] = hrp_imp_trans
lemmas [fcomp_norm_cong] = hrp_comp_cong hrp_prod_cong
lemmas [fcomp_norm_refl] = refl hrp_imp_refl
lemma ensure_fref_nresI: "(f,g)∈[P]⇩f R→S ⟹ (RETURN o f, RETURN o g)∈[P]⇩f R→⟨S⟩nres_rel"
by (auto intro: nres_relI simp: fref_def)
lemma ensure_fref_nres_unfold:
"⋀f. RETURN o (uncurry0 f) = uncurry0 (RETURN f)"
"⋀f. RETURN o (uncurry f) = uncurry (RETURN oo f)"
"⋀f. (RETURN ooo uncurry) f = uncurry (RETURN ooo f)"
by auto
text ‹Composed precondition normalizer›
named_theorems fcomp_prenorm_simps ‹fcomp precondition-normalizer: Simplification theorems›
text ‹Support for preconditions of the form ‹_∈Domain R›,
where ‹R› is the relation of the next more abstract level.›
declare DomainI[fcomp_prenorm_simps]
lemma auto_weaken_pre_init_hf:
assumes "⋀x. PROTECT P x ⟶ P' x"
assumes "(f,h) ∈ hfref P' R S"
shows "(f,h) ∈ hfref P R S"
using assms
by (auto simp: hfref_def)
lemma auto_weaken_pre_init_f:
assumes "⋀x. PROTECT P x ⟶ P' x"
assumes "(f,h) ∈ fref P' R S"
shows "(f,h) ∈ fref P R S"
using assms
by (auto simp: fref_def)
lemmas auto_weaken_pre_init = auto_weaken_pre_init_hf auto_weaken_pre_init_f
lemma auto_weaken_pre_uncurry_step:
assumes "PROTECT f a ≡ f'"
shows "PROTECT (λ(x,y). f x y) (a,b) ≡ f' b"
using assms
by (auto simp: curry_def dest!: meta_eq_to_obj_eq intro!: eq_reflection)
lemma auto_weaken_pre_uncurry_finish:
"PROTECT f x ≡ f x" by (auto)
lemma auto_weaken_pre_uncurry_start:
assumes "P ≡ P'"
assumes "P'⟶Q"
shows "P⟶Q"
using assms by (auto)
lemma auto_weaken_pre_comp_PRE_I:
assumes "S x ⟹ P x"
assumes "⋀y. ⟦(y,x)∈R; P x; S x⟧ ⟹ Q x y"
shows "comp_PRE R P Q S x"
using assms by (auto simp: comp_PRE_def)
lemma auto_weaken_pre_to_imp_nf:
"(A⟶B⟶C) = (A∧B ⟶ C)"
"((A∧B)∧C) = (A∧B∧C)"
by auto
lemma auto_weaken_pre_add_dummy_imp:
"P ⟹ True ⟶ P" by simp
text ‹Synthesis for hfref statements›
definition hfsynth_ID_R :: "('a ⇒ _ ⇒ assn) ⇒ 'a ⇒ bool" where
[simp]: "hfsynth_ID_R _ _ ≡ True"
lemma hfsynth_ID_R_D:
fixes I :: "'a itself"
assumes "hfsynth_ID_R R a"
assumes "intf_of_assn R I"
shows "a ::⇩i I"
by simp
lemma hfsynth_hnr_from_hfI:
assumes "∀x xi. P x ∧ hfsynth_ID_R (fst R) x ⟶ hn_refine (emp * hn_ctxt (fst R) x xi) (f$xi) (emp * hn_ctxt (snd R) x xi) S (g$x)"
shows "(f,g) ∈ [P]⇩a R → S"
using assms
unfolding hfref_def
by (auto simp: hn_ctxt_def)
lemma hfsynth_ID_R_uncurry_unfold:
"hfsynth_ID_R (to_hnr_prod R S) (a,b) ≡ hfsynth_ID_R R a ∧ hfsynth_ID_R S b"
"hfsynth_ID_R (fst (hf_pres R k)) ≡ hfsynth_ID_R R"
by (auto intro!: eq_reflection)
ML ‹
signature SEPREF_RULES = sig
val binder_rels: term -> term list
val body_rel: term -> term
val analyze_rel: term -> term option * term list * term
val mk_triv_precond: term list -> term
val mk_rel: term option * term list * term -> term
val strip_rel: term -> term list * term
val mk_hfprod : term * term -> term
val mk_hfprods : term list -> term
val intf_of_assn : Proof.context -> term -> typ
val to_fref : Proof.context -> thm -> thm
val to_foparam : Proof.context -> thm -> thm
val prepare_hfref_synth_tac : Proof.context -> tactic'
val to_hnr : Proof.context -> thm -> thm
val to_hfref: Proof.context -> thm -> thm
val ensure_fref : Proof.context -> thm -> thm
val ensure_fref_nres : Proof.context -> thm -> thm
val ensure_hfref : Proof.context -> thm -> thm
val ensure_hnr : Proof.context -> thm -> thm
type hnr_analysis = {
thm: thm,
precond: term,
prems : term list,
ahead: term * bool,
chead: term * bool,
argrels: (term * bool) list,
result_rel: term
}
val analyze_hnr: Proof.context -> thm -> hnr_analysis
val pretty_hnr_analysis: Proof.context -> hnr_analysis -> Pretty.T
val mk_hfref_thm: Proof.context -> hnr_analysis -> thm
val simplify_precond: Proof.context -> thm -> thm
val norm_fcomp_rule: Proof.context -> thm -> thm
val add_pure_constraints_rule: Proof.context -> thm -> thm
val gen_compose : Proof.context -> thm -> thm -> thm
val fcomp_attrib: attribute context_parser
end
structure Sepref_Rules: SEPREF_RULES = struct
local open Refine_Util Relators in
fun binder_rels @{mpat "?F → ?G"} = F::binder_rels G
| binder_rels @{mpat "fref _ ?F _"} = strip_prodrel_left F
| binder_rels _ = []
local
fun br_aux @{mpat "_ → ?G"} = br_aux G
| br_aux R = R
in
fun body_rel @{mpat "fref _ _ ?G"} = G
| body_rel R = br_aux R
end
fun strip_rel R = (binder_rels R, body_rel R)
fun analyze_rel @{mpat "fref (λ_. True) ?R ?S"} = (NONE,strip_prodrel_left R,S)
| analyze_rel @{mpat "fref ?P ?R ?S"} = (SOME P,strip_prodrel_left R,S)
| analyze_rel R = let
val (args,res) = strip_rel R
in
(NONE,args,res)
end
fun mk_triv_precond Rs = absdummy (map rel_absT Rs |> list_prodT_left) @{term True}
fun mk_rel (P,Rs,S) = let
val R = list_prodrel_left Rs
val P = case P of
SOME P => P
| NONE => mk_triv_precond Rs
in
@{mk_term "fref ?P ?R ?S"}
end
end
fun mk_hfprod (a, b) = @{mk_term "?a*⇩a?b"}
local
fun mk_hfprods_rev [] = @{mk_term "unit_assn⇧k"}
| mk_hfprods_rev [Rk] = Rk
| mk_hfprods_rev (Rkn::Rks) = mk_hfprod (mk_hfprods_rev Rks, Rkn)
in
val mk_hfprods = mk_hfprods_rev o rev
end
fun intf_of_assn ctxt t = let
val orig_ctxt = ctxt
val (t,ctxt) = yield_singleton (Variable.import_terms false) t ctxt
val v = TVar (("T",0),Proof_Context.default_sort ctxt ("T",0)) |> Logic.mk_type
val goal = @{mk_term "Trueprop (intf_of_assn ?t ?v)"}
val i_of_assn_rls =
Named_Theorems_Rev.get ctxt @{named_theorems_rev intf_of_assn}
@ @{thms intf_of_assn_fallback}
fun tac ctxt = REPEAT_ALL_NEW (resolve_tac ctxt i_of_assn_rls)
val thm = Goal.prove ctxt [] [] goal (fn {context,...} => ALLGOALS (tac context))
val intf = case Thm.concl_of thm of
@{mpat "Trueprop (intf_of_assn _ (?v AS⇩p TYPE (_)))"} => v
| _ => raise THM("Intf_of_assn: Proved a different theorem?",~1,[thm])
val intf = singleton (Variable.export_terms ctxt orig_ctxt) intf
|> Logic.dest_type
in
intf
end
datatype rthm_type =
RT_HOPARAM
| RT_FREF
| RT_HNR
| RT_HFREF
| RT_OTHER
fun rthm_type thm =
case Thm.concl_of thm |> HOLogic.dest_Trueprop of
@{mpat "(_,_) ∈ fref _ _ _"} => RT_FREF
| @{mpat "(_,_) ∈ hfref _ _ _"} => RT_HFREF
| @{mpat "hn_refine _ _ _ _ _"} => RT_HNR
| @{mpat "(_,_) ∈ _"} => RT_HOPARAM
| _ => RT_OTHER
fun to_fref ctxt thm = let
open Conv
in
case Thm.concl_of thm |> HOLogic.dest_Trueprop of
@{mpat "(_,_)∈_→_"} =>
Local_Defs.unfold0 ctxt @{thms fref_param1} thm
|> fconv_rule (repeat_conv (Refine_Util.ftop_conv (K (rewr_conv @{thm fref_nest})) ctxt))
|> Local_Defs.unfold0 ctxt @{thms in_CURRY_conv}
| @{mpat "(_,_)∈_"} => thm RS @{thm fref_param0I}
| _ => raise THM ("to_fref: Expected theorem of form (_,_)∈_",~1,[thm])
end
fun to_foparam ctxt thm = let
val unf_thms = @{thms
split_tupled_all prod_rel_simp uncurry_apply cnv_conj_to_meta Product_Type.split}
in
case Thm.concl_of thm of
@{mpat "Trueprop ((_,_) ∈ fref _ _ _)"} =>
(@{thm frefD} OF [thm])
|> forall_intr_vars
|> Local_Defs.unfold0 ctxt unf_thms
|> Variable.gen_all ctxt
| @{mpat "Trueprop ((_,_) ∈ _)"} =>
Parametricity.fo_rule thm
| _ => raise THM("Expected parametricity or fref theorem",~1,[thm])
end
fun to_hnr ctxt thm =
(thm RS @{thm hf2hnr})
|> Local_Defs.unfold0 ctxt @{thms to_hnr_prod_fst_snd keep_drop_sels}
|> Local_Defs.unfold0 ctxt @{thms hnr_uncurry_unfold}
|> Local_Defs.unfold0 ctxt @{thms uncurry_apply uncurry_APP assn_one_left split}
|> Local_Defs.unfold0 ctxt @{thms hn_ctxt_ctxt_fix_conv}
|> Local_Defs.unfold0 ctxt @{thms all_to_meta imp_to_meta HOL.True_implies_equals HOL.implies_True_equals Pure.triv_forall_equality cnv_conj_to_meta}
|> Local_Defs.unfold0 ctxt (Named_Theorems.get ctxt @{named_theorems to_hnr_post})
|> Goal.norm_result ctxt
|> Conv.fconv_rule Thm.eta_conversion
fun prepare_hfref_synth_tac ctxt = let
val i_of_assn_rls =
Named_Theorems_Rev.get ctxt @{named_theorems_rev intf_of_assn}
@ @{thms intf_of_assn_fallback}
val to_hnr_post_rls =
Named_Theorems.get ctxt @{named_theorems to_hnr_post}
val i_of_assn_tac = (
REPEAT' (
DETERM o dresolve_tac ctxt @{thms hfsynth_ID_R_D}
THEN' DETERM o SOLVED' (REPEAT_ALL_NEW (resolve_tac ctxt i_of_assn_rls))
)
)
in
resolve_tac ctxt @{thms hfsynth_hnr_from_hfI} THEN_ELSE' (
SELECT_GOAL (
unfold_tac ctxt @{thms to_hnr_prod_fst_snd keep_drop_sels hf_pres_fst}
THEN unfold_tac ctxt @{thms hnr_uncurry_unfold hfsynth_ID_R_uncurry_unfold}
THEN unfold_tac ctxt @{thms uncurry_apply uncurry_APP assn_one_left split}
THEN unfold_tac ctxt @{thms all_to_meta imp_to_meta HOL.True_implies_equals HOL.implies_True_equals Pure.triv_forall_equality cnv_conj_to_meta}
THEN ALLGOALS i_of_assn_tac
THEN unfold_tac ctxt to_hnr_post_rls
THEN unfold_tac ctxt @{thms APP_def}
)
,
K all_tac
)
end
structure Termtab2 = Table(
type key = term * term
val ord = prod_ord Term_Ord.fast_term_ord Term_Ord.fast_term_ord);
type hnr_analysis = {
thm: thm,
precond: term,
prems : term list,
ahead: term * bool,
chead: term * bool,
argrels: (term * bool) list,
result_rel: term
}
fun analyze_hnr (ctxt:Proof.context) thm = let
val dbg = Unsynchronized.ref []
fun add_dbg msg ts = (
dbg := (msg,ts) :: !dbg;
()
)
fun pretty_dbg (msg,ts) = Pretty.block [
Pretty.str msg,
Pretty.str ":",
Pretty.brk 1,
Pretty.list "[" "]" (map (Syntax.pretty_term ctxt) ts)
]
fun pretty_dbgs l = map pretty_dbg l |> Pretty.fbreaks |> Pretty.block
fun trace_dbg msg = Pretty.block [Pretty.str msg, Pretty.fbrk, pretty_dbgs (rev (!dbg))] |> Pretty.string_of |> tracing
fun fail msg = (trace_dbg msg; raise THM(msg,~1,[thm]))
fun assert cond msg = cond orelse fail msg;
fun check_strip_leading args t f =
if Termtab.defined args f then (t,false) else (f,true)
fun strip_leading_RETURN args (t as @{mpat "RETURN$(?f)"}) = check_strip_leading args t f
| strip_leading_RETURN args (t as @{mpat "RETURN ?f"}) = check_strip_leading args t f
| strip_leading_RETURN _ t = (t,false)
fun strip_leading_return args (t as @{mpat "return$(?f)"}) = check_strip_leading args t f
| strip_leading_return args (t as @{mpat "return ?f"}) = check_strip_leading args t f
| strip_leading_return _ t = (t,false)
fun strip_fun _ (t as @{mpat "PR_CONST _"}) = (t,[])
| strip_fun s (t as @{mpat "?f$?x"}) = check_arg s t f x
| strip_fun s (t as @{mpat "?f ?x"}) = check_arg s t f x
| strip_fun _ f = (f,[])
and check_arg s t f x =
if Termtab.defined s x then
strip_fun s f |> apsnd (curry op :: x)
else (t,[])
fun dest_hn_ctxt @{mpat "hn_ctxt ?R ?a ?c"} = ((a,c),R)
| dest_hn_ctxt _ = fail "Invalid hn_ctxt parameter in pre or postcondition"
fun dest_hn_refine @{mpat "(hn_refine ?G ?c ?G' ?R ?a)"} = (G,c,G',R,a)
| dest_hn_refine _ = fail "Conclusion is not a hn_refine statement"
fun is_emp @{mpat emp} = true | is_emp _ = false
val strip_star' = Sepref_Basic.strip_star #> filter (not o is_emp)
fun pairs_eq pairs1 pairs2 =
Termtab2.forall (Termtab2.defined pairs1 o fst) pairs2
andalso Termtab2.forall (Termtab2.defined pairs2 o fst) pairs1
fun atomize_prem @{mpat "Trueprop ?p"} = p
| atomize_prem _ = fail "Non-atomic premises"
fun mk_conjs [] = @{const True}
| mk_conjs [p] = p
| mk_conjs (p::ps) = HOLogic.mk_binop @{const_name "HOL.conj"} (p,mk_conjs ps)
val _ = add_dbg "thm" [Thm.prop_of thm]
val prems = Thm.prems_of thm
val concl = Thm.concl_of thm |> HOLogic.dest_Trueprop
val (G,c,G',R,a) = dest_hn_refine concl
val pre_pairs = G
|> strip_star'
|> tap (add_dbg "precondition")
|> map dest_hn_ctxt
|> Termtab2.make
val post_pairs = G'
|> strip_star'
|> tap (add_dbg "postcondition")
|> map dest_hn_ctxt
|> Termtab2.make
val _ = assert (pairs_eq pre_pairs post_pairs)
"Parameters in precondition do not match postcondition"
val aa_set = pre_pairs |> Termtab2.keys |> map fst |> Termtab.make_set
val ca_set = pre_pairs |> Termtab2.keys |> map snd |> Termtab.make_set
val (a,leading_RETURN) = strip_leading_RETURN aa_set a
val (c,leading_return) = strip_leading_return ca_set c
val _ = add_dbg "stripped abstract term" [a]
val _ = add_dbg "stripped concrete term" [c]
val (ahead,aargs) = strip_fun aa_set a;
val (chead,cargs) = strip_fun ca_set c;
val _ = add_dbg "abstract head" [ahead]
val _ = add_dbg "abstract args" aargs
val _ = add_dbg "concrete head" [chead]
val _ = add_dbg "concrete args" cargs
val _ = assert (length cargs = length aargs) "Different number of abstract and concrete arguments";
val _ = assert (not (has_duplicates op aconv aargs)) "Duplicate abstract arguments"
val _ = assert (not (has_duplicates op aconv cargs)) "Duplicate concrete arguments"
val argpairs = aargs ~~ cargs
val ap_set = Termtab2.make_set argpairs
val _ = assert (pairs_eq pre_pairs ap_set) "Arguments from pre/postcondition do not match operation's arguments"
val pre_rels = map (the o (Termtab2.lookup pre_pairs)) argpairs
val post_rels = map (the o (Termtab2.lookup post_pairs)) argpairs
val _ = add_dbg "pre-rels" pre_rels
val _ = add_dbg "post-rels" post_rels
fun adjust_hf_pres @{mpat "snd (?R⇧k)"} = R
| adjust_hf_pres t = t
val post_rels = map adjust_hf_pres post_rels
fun is_invalid R @{mpat "invalid_assn ?R'"} = R aconv R'
| is_invalid _ @{mpat "snd (_⇧d)"} = true
| is_invalid _ _ = false
fun is_keep (R,R') =
if R aconv R' then true
else if is_invalid R R' then false
else fail "Mismatch between pre and post relation for argument"
val keep = map is_keep (pre_rels ~~ post_rels)
val argrels = pre_rels ~~ keep
val aa_set = Termtab.make_set aargs
val ca_set = Termtab.make_set cargs
fun is_precond t =
(exists_subterm (Termtab.defined ca_set) t andalso fail "Premise contains concrete argument")
orelse exists_subterm (Termtab.defined aa_set) t
val (preconds, prems) = split is_precond prems
val precond =
map atomize_prem preconds
|> mk_conjs
|> fold lambda aargs
val _ = add_dbg "precond" [precond]
val _ = add_dbg "prems" prems
in
{
thm = thm,
precond = precond,
prems = prems,
ahead = (ahead,leading_RETURN),
chead = (chead,leading_return),
argrels = argrels,
result_rel = R
}
end
fun pretty_hnr_analysis
ctxt
({thm,precond,ahead,chead,argrels,result_rel,...})
: Pretty.T =
let
val _ = thm
fun pretty_argrel (R,k) = Pretty.block [
Syntax.pretty_term ctxt R,
if k then Pretty.str "⇧k" else Pretty.str "⇧d"
]
val pretty_chead = case chead of
(t,false) => Syntax.pretty_term ctxt t
| (t,true) => Pretty.block [Pretty.str "return ", Syntax.pretty_term ctxt t]
val pretty_ahead = case ahead of
(t,false) => Syntax.pretty_term ctxt t
| (t,true) => Pretty.block [Pretty.str "RETURN ", Syntax.pretty_term ctxt t]
in
Pretty.fbreaks [
Pretty.block [
Pretty.enclose "[" "]" [pretty_chead, pretty_ahead],
Pretty.enclose "[" "]" [Syntax.pretty_term ctxt precond],
Pretty.brk 1,
Pretty.block (Pretty.separate " →" (map pretty_argrel argrels @ [Syntax.pretty_term ctxt result_rel]))
]
] |> Pretty.block
end
fun mk_hfref_thm
ctxt
({thm,precond,prems,ahead,chead,argrels,result_rel}) =
let
fun mk_keep (R,true) = @{mk_term "?R⇧k"}
| mk_keep (R,false) = @{mk_term "?R⇧d"}
fun mk_uncurry f = @{mk_term "uncurry ?f"}
fun rpt_uncurry n t =
if n=0 then @{mk_term "uncurry0 ?t"}
else if n=1 then t
else funpow (n-1) mk_uncurry t
fun rew_uncurry_lambda t = let
val rr = map (Logic.dest_equals o Thm.prop_of) @{thms uncurry_def uncurry0_def}
val thy = Proof_Context.theory_of ctxt
in
Pattern.rewrite_term_top thy rr [] t
end
fun gsimp_only ctxt sec = let
val ss = put_simpset HOL_basic_ss ctxt |> sec
in asm_full_simp_tac ss end
fun simp_only ctxt thms = gsimp_only ctxt (fn ctxt => ctxt addsimps thms)
val num_args = length argrels
val precond = precond
|> rpt_uncurry num_args
|> rew_uncurry_lambda
fun mk_RETURN (t,r) = if r then
let
val T = funpow num_args range_type (fastype_of (fst ahead))
val tRETURN = Const (@{const_name RETURN}, T --> Type(@{type_name nres},[T]))
in
Refine_Util.mk_compN num_args tRETURN t
end
else t
fun mk_return (t,r) = if r then
let
val T = funpow num_args range_type (fastype_of (fst chead))
val tRETURN = Const (@{const_name return}, T --> Type(@{type_name Heap},[T]))
in
Refine_Util.mk_compN num_args tRETURN t
end
else t
fun certify_inst ctxt (instT, inst) =
(map (apsnd (Thm.ctyp_of ctxt)) instT,
map (apsnd (Thm.cterm_of ctxt)) inst);
val ahead = ahead |> mk_RETURN |> rpt_uncurry num_args
val chead = chead |> mk_return |> rpt_uncurry num_args
val argrel = map mk_keep argrels |> rev |> mk_hfprods
val result = @{mk_term "Trueprop ((?chead,?ahead) ∈ [?precond]⇩a ?argrel → ?result_rel)"}
val result = Logic.list_implies (prems,result)
val orig_ctxt = ctxt
val (insts, ctxt) = Variable.import_inst true [result] ctxt
val insts' = certify_inst ctxt insts
val result = Term_Subst.instantiate insts result
val thm = Thm.instantiate insts' thm
val thm = Local_Defs.unfold0 ctxt @{thms APP_def} thm
fun tac ctxt =
resolve_tac ctxt @{thms hfrefI}
THEN' gsimp_only ctxt (fn c => c
addsimps @{thms uncurry_def hn_ctxt_def uncurry0_def
keep_drop_sels uc_hfprod_sel o_apply
APP_def}
|> Splitter.add_split @{thm prod.split}
)
THEN' TRY o (
REPEAT_ALL_NEW (match_tac ctxt @{thms allI impI})
THEN' simp_only ctxt @{thms Product_Type.split prod.inject})
THEN' TRY o REPEAT_ALL_NEW (ematch_tac ctxt @{thms conjE})
THEN' TRY o hyp_subst_tac ctxt
THEN' simp_only ctxt @{thms triv_forall_equality}
THEN' (
resolve_tac ctxt @{thms hn_refine_cons[rotated]}
THEN' (resolve_tac ctxt [thm] THEN_ALL_NEW assume_tac ctxt))
THEN_ALL_NEW simp_only ctxt
@{thms hn_ctxt_def entt_refl pure_unit_rel_eq_empty
mult_ac mult_1 mult_1_right keep_drop_sels}
val result = Thm.cterm_of ctxt result
val rthm = Goal.prove_internal ctxt [] result (fn _ => ALLGOALS (tac ctxt))
val rthm = singleton (Variable.export ctxt orig_ctxt) rthm
val rthm = Local_Defs.unfold0 ctxt (Named_Theorems.get ctxt @{named_theorems to_hfref_post}) rthm
in
rthm
end
fun to_hfref ctxt = analyze_hnr ctxt #> mk_hfref_thm ctxt
local
fun norm_set_of ctxt = {
trans_rules = Named_Theorems.get ctxt @{named_theorems fcomp_norm_trans},
cong_rules = Named_Theorems.get ctxt @{named_theorems fcomp_norm_cong},
norm_rules = Named_Theorems.get ctxt @{named_theorems fcomp_norm_norm},
refl_rules = Named_Theorems.get ctxt @{named_theorems fcomp_norm_refl}
}
fun init_rules_of ctxt = Named_Theorems.get ctxt @{named_theorems fcomp_norm_init}
fun unfold_rules_of ctxt = Named_Theorems.get ctxt @{named_theorems fcomp_norm_unfold}
fun simp_rules_of ctxt = Named_Theorems.get ctxt @{named_theorems fcomp_norm_simps}
in
fun norm_fcomp_rule ctxt = let
open PO_Normalizer Refine_Util
val norm1 = gen_norm_rule (init_rules_of ctxt) (norm_set_of ctxt) ctxt
val norm2 = Local_Defs.unfold0 ctxt (unfold_rules_of ctxt)
val norm3 = Conv.fconv_rule (
Simplifier.asm_full_rewrite
(put_simpset HOL_basic_ss ctxt addsimps simp_rules_of ctxt))
val norm = changed_rule (try_rule norm1 o try_rule norm2 o try_rule norm3)
in
repeat_rule norm
end
end
fun add_pure_constraints_rule ctxt thm = let
val orig_ctxt = ctxt
val t = Thm.prop_of thm
fun
cnv (@{mpat (typs) "pure (mpaq_STRUCT (mpaq_Var ?x _) :: (?'v_c×?'v_a) set)"}) =
let
val T = a --> c --> @{typ assn}
val t = Var (x,T)
val t = @{mk_term "(the_pure ?t)"}
in
[(x,T,t)]
end
| cnv (t$u) = union op= (cnv t) (cnv u)
| cnv (Abs (_,_,t)) = cnv t
| cnv _ = []
val pvars = cnv t
val _ = (pvars |> map #1 |> has_duplicates op=)
andalso raise TERM ("Duplicate indexname with different type",[t])
val substs = map (fn (x,_,t) => (x,t)) pvars
val t' = subst_Vars substs t
fun mk_asm (x,T,_) = let
val t = Var (x,T)
val t = @{mk_term "Trueprop (CONSTRAINT is_pure ?t)"}
in
t
end
val assms = map mk_asm pvars
fun add_prems prems t = let
val prems' = Logic.strip_imp_prems t
val concl = Logic.strip_imp_concl t
in
Logic.list_implies (prems@prems', concl)
end
val t' = add_prems assms t'
val (t',ctxt) = yield_singleton (Variable.import_terms true) t' ctxt
val thm' = Goal.prove_internal ctxt [] (Thm.cterm_of ctxt t') (fn _ =>
ALLGOALS (resolve_tac ctxt [thm] THEN_ALL_NEW assume_tac ctxt))
val thm' = norm_fcomp_rule ctxt thm'
val thm' = singleton (Variable.export ctxt orig_ctxt) thm'
in
thm'
end
val cfg_simp_precond =
Attrib.setup_config_bool @{binding fcomp_simp_precond} (K true)
local
fun mk_simp_thm ctxt t = let
val st = t
|> HOLogic.mk_Trueprop
|> Thm.cterm_of ctxt
|> Goal.init
val ctxt = Context_Position.set_visible false ctxt
val ctxt = ctxt addsimps (
refine_pw_simps.get ctxt
@ Named_Theorems.get ctxt @{named_theorems fcomp_prenorm_simps}
@ @{thms split_tupled_all cnv_conj_to_meta}
)
val trace_incomplete_transfer_tac =
COND (Thm.prems_of #> exists (strip_all_body #> Logic.strip_imp_concl #> Term.is_open))
(print_tac ctxt "Failed transfer from intermediate level:") all_tac
val tac =
ALLGOALS (resolve_tac ctxt @{thms auto_weaken_pre_comp_PRE_I} )
THEN ALLGOALS (Simplifier.asm_full_simp_tac ctxt)
THEN trace_incomplete_transfer_tac
THEN ALLGOALS (TRY o filter_prems_tac ctxt (K false))
THEN Local_Defs.unfold0_tac ctxt [Drule.triv_forall_equality]
val st' = tac st |> Seq.take 1 |> Seq.list_of
val thm = case st' of [st'] => Goal.conclude st' | _ => raise THM("Simp_Precond: Simp-Tactic failed",~1,[st])
val _ = exists (Logic.is_all) (Thm.prems_of thm)
andalso raise THM("Simp_Precond: Transfer from intermediate level failed",~1,[thm])
val thm =
thm
|> Conv.fconv_rule (Object_Logic.atomize ctxt)
|> Local_Defs.unfold0 ctxt @{thms auto_weaken_pre_to_imp_nf}
val thm = case Thm.concl_of thm of
@{mpat "Trueprop (_ ⟶ _)"} => thm
| @{mpat "Trueprop _"} => thm RS @{thm auto_weaken_pre_add_dummy_imp}
| _ => raise THM("Simp_Precond: Generated odd theorem, expected form 'P⟶Q'",~1,[thm])
in
thm
end
in
fun simplify_precond ctxt thm = let
val orig_ctxt = ctxt
val thm = Refine_Util.OF_fst @{thms auto_weaken_pre_init} [asm_rl,thm]
val thm =
Local_Defs.unfold0 ctxt @{thms split_tupled_all} thm
OF @{thms auto_weaken_pre_uncurry_start}
fun rec_uncurry thm =
case try (fn () => thm OF @{thms auto_weaken_pre_uncurry_step}) () of
NONE => thm OF @{thms auto_weaken_pre_uncurry_finish}
| SOME thm => rec_uncurry thm
val thm = rec_uncurry thm
|> Conv.fconv_rule Thm.eta_conversion
val t = case Thm.prems_of thm of
t::_ => t | _ => raise THM("Simp-Precond: Expected at least one premise",~1,[thm])
val (t,ctxt) = yield_singleton (Variable.import_terms false) t ctxt
val ((_,t),ctxt) = Variable.focus NONE t ctxt
val t = case t of
@{mpat "Trueprop (_ ⟶ ?t)"} => t | _ => raise TERM("Simp_Precond: Expected implication",[t])
val simpthm = mk_simp_thm ctxt t
|> singleton (Variable.export ctxt orig_ctxt)
val thm = thm OF [simpthm]
val thm = Local_Defs.unfold0 ctxt @{thms prod_casesK} thm
in
thm
end
fun simplify_precond_if_cfg ctxt =
if Config.get ctxt cfg_simp_precond then
simplify_precond ctxt
else I
end
fun compose_ff ctxt A B =
(@{thm fref_compI_PRE} OF [A,B])
|> norm_fcomp_rule ctxt
|> simplify_precond_if_cfg ctxt
|> Conv.fconv_rule Thm.eta_conversion
fun compose_hf ctxt A B =
(@{thm hfref_compI_PRE} OF [A,B])
|> norm_fcomp_rule ctxt
|> simplify_precond_if_cfg ctxt
|> Conv.fconv_rule Thm.eta_conversion
|> add_pure_constraints_rule ctxt
|> Conv.fconv_rule Thm.eta_conversion
fun ensure_fref ctxt thm = case rthm_type thm of
RT_HOPARAM => to_fref ctxt thm
| RT_FREF => thm
| _ => raise THM("Expected parametricity or fref theorem",~1,[thm])
fun ensure_fref_nres ctxt thm = let
val thm = ensure_fref ctxt thm
in
case Thm.concl_of thm of
@{mpat (typs) "Trueprop (_∈fref _ _ (_::(_ nres×_)set))"} => thm
| @{mpat "Trueprop ((_,_)∈fref _ _ _)"} =>
(thm RS @{thm ensure_fref_nresI}) |> Local_Defs.unfold0 ctxt @{thms ensure_fref_nres_unfold}
| _ => raise THM("Expected fref-theorem",~1,[thm])
end
fun ensure_hfref ctxt thm = case rthm_type thm of
RT_HNR => to_hfref ctxt thm
| RT_HFREF => thm
| _ => raise THM("Expected hnr or hfref theorem",~1,[thm])
fun ensure_hnr ctxt thm = case rthm_type thm of
RT_HNR => thm
| RT_HFREF => to_hnr ctxt thm
| _ => raise THM("Expected hnr or hfref theorem",~1,[thm])
fun gen_compose ctxt A B = let
val rtA = rthm_type A
in
if rtA = RT_HOPARAM orelse rtA = RT_FREF then
compose_ff ctxt (ensure_fref ctxt A) (ensure_fref ctxt B)
else
compose_hf ctxt (ensure_hfref ctxt A) ((ensure_fref_nres ctxt B))
end
val parse_fcomp_flags = Refine_Util.parse_paren_lists
(Refine_Util.parse_bool_config "prenorm" cfg_simp_precond)
val fcomp_attrib = parse_fcomp_flags |-- Attrib.thm >> (fn B => Thm.rule_attribute [] (fn context => fn A =>
let
val ctxt = Context.proof_of context
in
gen_compose ctxt A B
end))
end
›
attribute_setup to_fref = ‹
Scan.succeed (Thm.rule_attribute [] (Sepref_Rules.to_fref o Context.proof_of))
› "Convert parametricity theorem to uncurried fref-form"
attribute_setup to_foparam = ‹
Scan.succeed (Thm.rule_attribute [] (Sepref_Rules.to_foparam o Context.proof_of))
› ‹Convert param or fref rule to first order rule›
attribute_setup param_fo = ‹
Scan.succeed (Thm.rule_attribute [] (Sepref_Rules.to_foparam o Context.proof_of))
› ‹Convert param or fref rule to first order rule›
attribute_setup to_hnr = ‹
Scan.succeed (Thm.rule_attribute [] (Sepref_Rules.to_hnr o Context.proof_of))
› "Convert hfref-rule to hnr-rule"
attribute_setup to_hfref = ‹Scan.succeed (
Thm.rule_attribute [] (Context.proof_of #> Sepref_Rules.to_hfref)
)› ‹Convert hnr to hfref theorem›
attribute_setup ensure_fref_nres = ‹Scan.succeed (
Thm.rule_attribute [] (Context.proof_of #> Sepref_Rules.ensure_fref_nres)
)›
attribute_setup sepref_dbg_norm_fcomp_rule = ‹Scan.succeed (
Thm.rule_attribute [] (Context.proof_of #> Sepref_Rules.norm_fcomp_rule)
)›
attribute_setup sepref_simplify_precond = ‹Scan.succeed (
Thm.rule_attribute [] (Context.proof_of #> Sepref_Rules.simplify_precond)
)› ‹Simplify precondition of fref/hfref-theorem›
attribute_setup FCOMP = Sepref_Rules.fcomp_attrib "Composition of refinement rules"
end
Theory Sepref_Combinator_Setup
section ‹Setup for Combinators›
theory Sepref_Combinator_Setup
imports Sepref_Rules Sepref_Monadify
keywords "sepref_register" :: thy_decl
and "sepref_decl_intf" :: thy_decl
begin
subsection ‹Interface Types›
text ‹
This tool allows the declaration of interface types.
An interface type is a new type, and a rewriting rule to an existing (logic) type,
which is used to encode objects of the interface type in the logic.
›
context begin
private definition T :: "string ⇒ unit list ⇒ unit" where "T _ _ ≡ ()"
private lemma unit_eq: "(a::unit) ≡ b" by simp
named_theorems "__itype_rewrite"
ML ‹
signature SEPREF_INTF_TYPES = sig
val decl_intf_type_cmd: ((string list * binding) * mixfix) * string -> local_theory -> local_theory
val register_itype_rewrite: typ -> typ -> Proof.context -> local_theory
val norm_intf_type: Proof.context -> typ -> typ
val check_intf_type: Proof.context -> typ -> typ -> bool
val check_intf_type_msg: (typ * typ -> unit) -> Proof.context -> typ -> typ -> unit
val check_intf_type_err: Proof.context -> typ -> typ -> unit
end
structure Sepref_Intf_Types: SEPREF_INTF_TYPES = struct
fun t2t (Type(name,args)) =
@{term T}
$HOLogic.mk_string name
$HOLogic.mk_list @{typ unit} (map t2t args)
| t2t (TFree (name,_)) = Var (("F"^name,0),HOLogic.unitT)
| t2t (TVar ((name,i),_)) = Var (("V"^name,i),HOLogic.unitT)
fun tt2 (t as (Var ((name,i),_))) =
if match_string "F*" name then TFree (unprefix "F" name, dummyS)
else if match_string "V*" name then TVar ((unprefix "V" name,i), dummyS)
else raise TERM("tt2: Invalid var",[t])
| tt2 @{mpat "T ?name ?args"} = Type (HOLogic.dest_string name, HOLogic.dest_list args |> map tt2)
| tt2 t = raise TERM("tt2: Invalid",[t])
fun mk_t2t_rew ctxt T1 T2 = let
fun chk_vars T = exists_subtype is_TVar T andalso raise TYPE("Type must not contain schematics",[T],[])
val _ = chk_vars T1
val _ = chk_vars T2
val free1 = Term.add_tfreesT T1 []
val free2 = Term.add_tfreesT T2 []
val _ = subset (=) (free2,free1) orelse raise TYPE("Free variables on RHS must also occur on LHS",[T1,T2],[])
in
Thm.instantiate' [] [
t2t T1 |> Thm.cterm_of ctxt |> SOME,
t2t T2 |> Thm.cterm_of ctxt |> SOME
]
@{thm unit_eq}
end
fun register_itype_rewrite T1 T2 lthy =
lthy
|> Local_Theory.note ((Binding.empty,@{attributes ["__itype_rewrite"]}),[mk_t2t_rew lthy T1 T2])
|> #2
val decl_intf_type_parser =
Parse.type_args -- Parse.binding -- Parse.opt_mixfix --| @{keyword "is"} -- Parse.typ
fun decl_intf_type_cmd (((args,a),mx),T2_raw) lthy = let
val (T1,lthy) = Typedecl.typedecl {final = true} (a, map (rpair dummyS) args, mx) lthy
val T2 = Syntax.read_typ lthy T2_raw
in
register_itype_rewrite T1 T2 lthy
end
fun norm_intf_typet ctxt T = let
val rew_rls = Named_Theorems.get ctxt @{named_theorems "__itype_rewrite"}
in
t2t T
|> Thm.cterm_of ctxt
|> Drule.mk_term
|> Local_Defs.unfold0 ctxt rew_rls
|> Drule.dest_term
|> Thm.term_of
end
fun norm_intf_type ctxt T = norm_intf_typet ctxt T |> tt2
fun check_intf_type ctxt iT cT = let
val it = norm_intf_typet ctxt iT
val ct = t2t cT
val thy = Proof_Context.theory_of ctxt
in
Pattern.matches thy (it,ct)
end
fun check_intf_type_msg msg ctxt iT cT = let
val it = norm_intf_typet ctxt iT
val ct = t2t cT
val thy = Proof_Context.theory_of ctxt
in
if Pattern.matches thy (it,ct) then ()
else msg (tt2 it, tt2 ct)
end
fun check_intf_type_err ctxt iT cT = let
fun msg (iT',cT') = Pretty.block [
Pretty.str "Interface type and logical type do not match",
Pretty.fbrk,
Pretty.str "Interface: ",Syntax.pretty_typ ctxt iT, Pretty.brk 1,
Pretty.str " is ", Syntax.pretty_typ ctxt iT', Pretty.fbrk,
Pretty.str "Logical: ",Syntax.pretty_typ ctxt cT, Pretty.brk 1,
Pretty.str " is ", Syntax.pretty_typ ctxt cT', Pretty.fbrk
] |> Pretty.string_of |> error
in
check_intf_type_msg msg ctxt iT cT
end
val _ =
Outer_Syntax.local_theory
@{command_keyword "sepref_decl_intf"}
"Declare interface type"
( decl_intf_type_parser >> decl_intf_type_cmd);
end
›
end
subsection ‹Rewriting Inferred Interface Types›
definition map_type_eq :: "'a itself ⇒ 'b itself ⇒ bool"
(infixr "→⇩n⇩t" 60)
where [simp]: "map_type_eq _ _ ≡ True"
lemma map_type_eqI: "map_type_eq L R" by auto
named_theorems_rev map_type_eqs
subsection ‹ML-Code›
context begin
private lemma start_eval: "x ≡ SP x" by auto
private lemma add_eval: "f x ≡ (⤜)$(EVAL$x)$(λ⇩2x. f x)" by auto
private lemma init_mk_arity: "f ≡ id (SP f)" by simp
private lemma add_mk_arity: "id f ≡ (λ⇩2x. id (f$x))" by auto
private lemma finish_mk_arity: "id f ≡ f" by simp
ML ‹
structure Sepref_Combinator_Setup = struct
fun is_valid_abs_op _ (Const _) = true
| is_valid_abs_op ctxt (Free (name,_)) = Variable.is_fixed ctxt name
| is_valid_abs_op _ @{mpat "PR_CONST _"} = true
| is_valid_abs_op _ _ = false
fun mk_itype ctxt t tyt = let
val cert = Thm.cterm_of ctxt
val t = cert t
val tyt = cert tyt
in
Drule.infer_instantiate' ctxt [SOME t, SOME tyt] @{thm itypeI}
end
fun mk_mcomb ctxt t n = let
val T = fastype_of t
val (argsT,_) = strip_type T
val _ = length argsT >= n orelse raise TERM("Too few arguments",[t])
val effT = take n argsT
val orig_ctxt = ctxt
val names = map (fn i => "x"^string_of_int i) (1 upto n)
val (names,ctxt) = Variable.variant_fixes names ctxt
val vars = map Free (names ~~ effT)
val lhs = Autoref_Tagging.list_APP (t,vars)
|> Thm.cterm_of ctxt
fun add_EVAL x thm =
case Thm.prop_of thm of
@{mpat "_ ≡ ?rhs"} => let
val f = lambda x rhs |> Thm.cterm_of ctxt
val x = Thm.cterm_of ctxt x
val eval_thm = Drule.infer_instantiate' ctxt
[SOME f, SOME x] @{thm add_eval}
val thm = @{thm transitive} OF [thm,eval_thm]
in thm end
| _ => raise THM ("mk_mcomb internal: Expected lhs==rhs",~1,[thm])
val thm = Drule.infer_instantiate' ctxt [SOME lhs] @{thm start_eval}
val thm = fold add_EVAL (rev vars) thm
val thm = singleton (Proof_Context.export ctxt orig_ctxt) thm
in
thm
end;
fun mk_arity ctxt t n = let
val t = Thm.cterm_of ctxt t
val thm = Drule.infer_instantiate' ctxt [SOME t] @{thm init_mk_arity}
val add_mk_arity = Conv.fconv_rule (
Refine_Util.ftop_conv (K (Conv.rewr_conv @{thm add_mk_arity})) ctxt)
val thm = funpow n add_mk_arity thm
val thm = Conv.fconv_rule (
Refine_Util.ftop_conv (K (Conv.rewr_conv @{thm finish_mk_arity})) ctxt) thm
in
thm
end;
datatype opkind = PURE | COMB
fun analyze_decl c tyt = let
fun add_tcons_of (Type (name,args)) l = fold add_tcons_of args (name::l)
| add_tcons_of _ l = l
fun all_tcons_of P T = forall P (add_tcons_of T [])
val T = Logic.dest_type tyt
val (argsT,resT) = strip_type T
val _ = forall (all_tcons_of (fn tn => tn <> @{type_name nres})) argsT
orelse raise TYPE (
"Arguments contain nres-type "
^ "(currently not supported by this attribute)",
argsT,[c,tyt])
val kind = case resT of
Type (@{type_name nres},_) => COMB
| T => let
val _ = all_tcons_of (fn tn => tn <> @{type_name nres}) T
orelse raise TYPE (
"Result contains inner nres-type",
argsT,[c,tyt])
in
PURE
end
in (kind,(argsT,resT)) end
fun analyze_itype_thm thm =
case Thm.prop_of thm of
@{mpat (typs) "Trueprop (intf_type ?c (_::?'v_T itself))"} => let
val tyt = Logic.mk_type T
val (kind,(argsT,resT)) = analyze_decl c tyt
in (c,kind,(argsT,resT)) end
| _ => raise THM("Invalid itype-theorem",~1,[thm])
fun generate_basename ctxt t = let
fun fail () = raise TERM ("Basename generation heuristics failed. Specify a basename.",[t])
fun gb (Const (n,_)) =
n |> space_explode "." |> List.last
| gb (@{mpat "PR_CONST ?t"}) = gb t
| gb (t as (_$_)) = let
val h = head_of t
val _ = is_Const h orelse is_Free h orelse fail ()
in
gb h
end
| gb (Free (n,_)) =
if Variable.is_fixed ctxt n then n
else fail ()
| gb _ = fail ()
in
gb t
end
fun map_type_raw ctxt rls T = let
val thy = Proof_Context.theory_of ctxt
fun rewr_this (lhs,rhs) T = let
val env = Sign.typ_match thy (lhs,T) Vartab.empty
in
Envir.norm_type env rhs
end
fun map_Targs f (Type (name,args)) = Type (name,map f args)
| map_Targs _ T = T
fun
rewr_thiss (r::rls) T =
(SOME (rewr_this r T) handle Type.TYPE_MATCH => rewr_thiss rls T)
| rewr_thiss [] _ = NONE
fun
map_type_aux T =
let
val T = map_Targs map_type_aux T
in
case rewr_thiss rls T of
SOME T => map_type_aux T
| NONE => T
end
in
map_type_aux T
end
fun get_nt_rule thm = case Thm.prop_of thm of
@{mpat (typs) "Trueprop (map_type_eq (_::?'v_L itself) (_::?'v_R itself))"} =>
let
val Lvars = Term.add_tvar_namesT L []
val Rvars = Term.add_tvar_namesT R []
val _ = subset (=) (Rvars, Lvars) orelse (
let
val frees = subtract (=) Lvars Rvars
|> map (Term.string_of_vname)
|> Pretty.str_list "[" "]"
|> Pretty.string_of
in
raise THM ("Free variables on RHS: "^frees,~1,[thm])
end)
in
(L,R)
end
| _ => raise THM("No map_type_eq theorem",~1,[thm])
fun map_type ctxt T = let
val rls =
Named_Theorems_Rev.get ctxt @{named_theorems_rev map_type_eqs}
|> map get_nt_rule
in map_type_raw ctxt rls T end
fun read_term_type ts tys lthy = case tys of
SOME ty => let
val ty = Syntax.read_typ lthy ty
val ctxt = Variable.declare_typ ty lthy
val t = Syntax.read_term ctxt ts
val ctxt = Variable.declare_term t ctxt
in
((t,ty),ctxt)
end
| NONE => let
val t = Syntax.read_term lthy ts
val ctxt = Variable.declare_term t lthy
val tyt = fastype_of t |> map_type ctxt |> Logic.mk_type
val tyt = tyt |> singleton (Variable.export_terms ctxt lthy)
val (tyt,ctxt) = yield_singleton (Variable.import_terms true) tyt ctxt
val ty = Logic.dest_type tyt
in
((t,ty),ctxt)
end
fun check_type_intf ctxt Tc Ti = let
fun type2term (TFree (name,_)) = Var (("F"^name,0),HOLogic.unitT)
| type2term (TVar ((name,i),_)) = Var (("V"^name,i),HOLogic.unitT)
| type2term (Type (@{type_name "fun"},[T1,T2])) =
Free ("F",HOLogic.unitT --> HOLogic.unitT --> HOLogic.unitT)
$type2term T1$type2term T2
| type2term (Type (name,argsT)) = let
val args = map type2term argsT
val n = length args
val T = replicate n HOLogic.unitT ---> HOLogic.unitT
val v = Var (("T"^name,0),T)
in list_comb (v, args) end
val c = type2term Tc
val i = type2term Ti
val thy = Proof_Context.theory_of ctxt
in
Pattern.matches thy (i,c)
end
fun import_terms_disj ts ctxt = let
fun exp ctxt t = let
val new_ctxt = Variable.declare_term t ctxt
val t = singleton (Variable.export_terms new_ctxt ctxt) t
in t end
val ts = map (exp ctxt) ts
fun cons_fst f a (l,b) = let val (a,b) = f a b in (a::l,b) end
val (ts,ctxt) = fold_rev (cons_fst (yield_singleton (Variable.import_terms true))) ts ([],ctxt)
in
(ts,ctxt)
end
type reg_thms = {
itype_thm: thm,
arity_thm: thm option,
mcomb_thm: thm option
}
fun cr_reg_thms t ty ctxt = let
val orig_ctxt = ctxt
val tyt = Logic.mk_type ty
val ([t,tyt],ctxt) = import_terms_disj [t,tyt] ctxt
val (kind,(argsT,_)) = analyze_decl t tyt
val n = length argsT
val _ = Sepref_Intf_Types.check_intf_type_err ctxt ty (fastype_of t)
val _ = is_valid_abs_op ctxt t
orelse raise TERM("Malformed abstract operation. Use PR_CONST for complex terms.",[t])
val itype_thm = mk_itype ctxt t tyt
|> singleton (Variable.export ctxt orig_ctxt)
in
case kind of
PURE => {itype_thm = itype_thm, arity_thm = NONE, mcomb_thm = NONE}
| COMB => let
val arity_thm = mk_arity ctxt t n
|> singleton (Variable.export ctxt orig_ctxt)
val mcomb_thm = mk_mcomb ctxt t n
|> singleton (Variable.export ctxt orig_ctxt)
in
{itype_thm = itype_thm, arity_thm = SOME arity_thm, mcomb_thm = SOME mcomb_thm}
end
end
fun gen_pr_const_pat ctxt t =
if is_valid_abs_op ctxt t then (NONE,t)
else
let
val ct = Thm.cterm_of ctxt t
val thm = Drule.infer_instantiate' ctxt [SOME ct] @{thm UNPROTECT_def[symmetric]}
|> Conv.fconv_rule (Conv.arg1_conv (Id_Op.protect_conv ctxt))
in
(SOME thm,@{mk_term "PR_CONST ?t"})
end
fun sepref_register_single basename t ty lthy = let
fun mk_qualified basename q = Binding.qualify true basename (Binding.name q);
fun
do_note _ _ NONE = I
| do_note q attrs (SOME thm) =
Local_Theory.note ((mk_qualified basename q,attrs),[thm]) #> snd
val (pat_thm,t) = gen_pr_const_pat lthy t
val {itype_thm, arity_thm, mcomb_thm} = cr_reg_thms t ty lthy
val lthy = lthy
|> do_note "pat" @{attributes [def_pat_rules]} pat_thm
|> do_note "itype" @{attributes [id_rules]} (SOME itype_thm)
|> do_note "arity" @{attributes [sepref_monadify_arity]} arity_thm
|> do_note "mcomb" @{attributes [sepref_monadify_comb]} mcomb_thm
in
(((arity_thm,mcomb_thm),itype_thm),lthy)
end
fun sepref_register_single_cmd ((basename,ts),tys) lthy = let
val t = Syntax.read_term lthy ts
val ty = map_option (Syntax.read_typ lthy) tys
val ty = case ty of SOME ty => ty | NONE => fastype_of t |> map_type lthy
val basename = case basename of
NONE => generate_basename lthy t
| SOME n => n
val ((_,itype_thm),lthy) = sepref_register_single basename t ty lthy
val _ = Thy_Output.pretty_thm lthy itype_thm |> Pretty.string_of |> writeln
in
lthy
end
val sepref_register_cmd = fold sepref_register_single_cmd
val sepref_register_parser = Scan.repeat1 (
Scan.option (Parse.name --| @{keyword ":"})
-- Parse.term
-- Scan.option (@{keyword "::"} |-- Parse.typ)
)
val _ =
Outer_Syntax.local_theory
@{command_keyword "sepref_register"}
"Register operation for sepref"
( sepref_register_parser
>> sepref_register_cmd);
val sepref_register_adhoc_parser = Scan.repeat1 (
Args.term -- Scan.option (Scan.lift (Args.$$$ "::") |-- Args.typ)
)
fun sepref_register_adhoc_single (t,ty) context = let
val ctxt = Context.proof_of context
val ty = case ty of SOME ty => ty | NONE => fastype_of t |> map_type ctxt
val (pat_thm,t) = gen_pr_const_pat ctxt t
val {itype_thm, arity_thm, mcomb_thm} = cr_reg_thms t ty ctxt
fun app _ NONE = I
| app attr (SOME thm) = Thm.apply_attribute attr thm #> snd
in
context
|> app (Named_Theorems_Rev.add @{named_theorems_rev def_pat_rules}) pat_thm
|> app (Named_Theorems_Rev.add @{named_theorems_rev id_rules}) (SOME itype_thm)
|> app (Named_Theorems_Rev.add @{named_theorems_rev sepref_monadify_arity}) arity_thm
|> app (Named_Theorems_Rev.add @{named_theorems_rev sepref_monadify_comb}) mcomb_thm
end
val sepref_register_adhoc = fold sepref_register_adhoc_single
fun sepref_register_adhoc_attr ttys = Thm.declaration_attribute (K (sepref_register_adhoc ttys))
val sepref_register_adhoc_attr_decl = sepref_register_adhoc_parser >> sepref_register_adhoc_attr
end
›
end
attribute_setup sepref_register_adhoc = Sepref_Combinator_Setup.sepref_register_adhoc_attr_decl
‹Register operations in ad-hoc manner. Improper if this gets exported!›
subsection ‹Obsolete Manual Setup Rules›
lemma
mk_mcomb1: "⋀c. c$x1 ≡ (⤜)$(EVAL$x1)$(λ⇩2x1. SP (c$x1))"
and mk_mcomb2: "⋀c. c$x1$x2 ≡ (⤜)$(EVAL$x1)$(λ⇩2x1. (⤜)$(EVAL$x2)$(λ⇩2x2. SP (c$x1$x2)))"
and mk_mcomb3: "⋀c. c$x1$x2$x3 ≡ (⤜)$(EVAL$x1)$(λ⇩2x1. (⤜)$(EVAL$x2)$(λ⇩2x2. (⤜)$(EVAL$x3)$(λ⇩2x3. SP (c$x1$x2$x3))))"
by auto
end
Theory Sepref_Translate
section ‹Translation›
theory Sepref_Translate
imports
Sepref_Monadify
Sepref_Constraints
Sepref_Frame
"Lib/Pf_Mono_Prover"
Sepref_Rules
Sepref_Combinator_Setup
"Lib/User_Smashing"
begin
text ‹
This theory defines the translation phase.
The main functionality of the translation phase is to
apply refinement rules. Thereby, the linearity information is
exploited to create copies of parameters that are still required, but
would be destroyed by a synthesized operation.
These \emph{frame-based} rules are in the named theorem collection
‹sepref_fr_rules›, and the collection ‹sepref_copy_rules›
contains rules to handle copying of parameters.
Apart from the frame-based rules described above, there is also a set of
rules for combinators, in the collection ‹sepref_comb_rules›,
where no automatic copying of parameters is applied.
Moreover, this theory contains
\begin{itemize}
\item A setup for the basic monad combinators and recursion.
\item A tool to import parametricity theorems.
\item Some setup to identify pure refinement relations, i.e., those not
involving the heap.
\item A preprocessor that identifies parameters in refinement goals,
and flags them with a special tag, that allows their correct handling.
\end{itemize}
›
text ‹Tag to keep track of abstract bindings.
Required to recover information for side-condition solving.›
definition "bind_ref_tag x m ≡ RETURN x ≤ m"
text ‹Tag to keep track of preconditions in assertions›
definition "vassn_tag Γ ≡ ∃h. h⊨Γ"
lemma vassn_tagI: "h⊨Γ ⟹ vassn_tag Γ"
unfolding vassn_tag_def ..
lemma vassn_dest[dest!]:
"vassn_tag (Γ⇩1 * Γ⇩2) ⟹ vassn_tag Γ⇩1 ∧ vassn_tag Γ⇩2"
"vassn_tag (hn_ctxt R a b) ⟹ a∈rdom R"
unfolding vassn_tag_def rdomp_def[abs_def]
by (auto simp: mod_star_conv hn_ctxt_def)
lemma entails_preI:
assumes "vassn_tag A ⟹ A ⟹⇩A B"
shows "A ⟹⇩A B"
using assms
by (auto simp: entails_def vassn_tag_def)
lemma invalid_assn_const:
"invalid_assn (λ_ _. P) x y = ↑(vassn_tag P) * true"
by (simp_all add: invalid_assn_def vassn_tag_def)
lemma vassn_tag_simps[simp]:
"vassn_tag emp"
"vassn_tag true"
by (sep_auto simp: vassn_tag_def mod_emp)+
definition "GEN_ALGO f Φ ≡ Φ f"
lemma is_GEN_ALGO: "GEN_ALGO f Φ ⟹ GEN_ALGO f Φ" .
text ‹Tag for side-condition solver to discharge by assumption›
definition RPREM :: "bool ⇒ bool" where [simp]: "RPREM P = P"
lemma RPREMI: "P ⟹ RPREM P" by simp
lemma trans_frame_rule:
assumes "RECOVER_PURE Γ Γ'"
assumes "vassn_tag Γ' ⟹ hn_refine Γ' c Γ'' R a"
shows "hn_refine (F*Γ) c (F*Γ'') R a"
apply (rule hn_refine_frame[OF _ entt_refl])
applyF (rule hn_refine_cons_pre)
focus using assms(1) unfolding RECOVER_PURE_def apply assumption solved
apply1 (rule hn_refine_preI)
apply1 (rule assms)
applyS (auto simp add: vassn_tag_def)
solved
done
lemma recover_pure_cons:
assumes "RECOVER_PURE Γ Γ'"
assumes "hn_refine Γ' c Γ'' R a"
shows "hn_refine (Γ) c (Γ'') R a"
using trans_frame_rule[where F=emp, OF assms] by simp
definition CPR_TAG :: "assn ⇒ assn ⇒ bool" where [simp]: "CPR_TAG y x ≡ True"
lemma CPR_TAG_starI:
assumes "CPR_TAG P1 Q1"
assumes "CPR_TAG P2 Q2"
shows "CPR_TAG (P1*P2) (Q1*Q2)"
by simp
lemma CPR_tag_ctxtI: "CPR_TAG (hn_ctxt R x xi) (hn_ctxt R' x xi)" by simp
lemma CPR_tag_fallbackI: "CPR_TAG P Q" by simp
lemmas CPR_TAG_rules = CPR_TAG_starI CPR_tag_ctxtI CPR_tag_fallbackI
lemma cons_pre_rule:
assumes "CPR_TAG P P'"
assumes "P ⟹⇩t P'"
assumes "hn_refine P' c Q R m"
shows "hn_refine P c Q R m"
using assms(2-) by (rule hn_refine_cons_pre)
named_theorems_rev sepref_gen_algo_rules ‹Sepref: Generic algorithm rules›
ML ‹
structure Sepref_Translate = struct
val cfg_debug =
Attrib.setup_config_bool @{binding sepref_debug_translate} (K false)
val dbg_msg_tac = Sepref_Debugging.dbg_msg_tac cfg_debug
fun gen_msg_analyze t ctxt = let
val t = Logic.strip_assums_concl t
in
case t of
@{mpat "Trueprop ?t"} => (case t of
@{mpat "_ ∨⇩A _ ⟹⇩t _"} => "t_merge"
| @{mpat "_ ⟹⇩t _"} => "t_frame"
| @{mpat "INDEP _"} => "t_indep"
| @{mpat "CONSTRAINT _ _"} => "t_constraint"
| @{mpat "mono_Heap _"} => "t_mono"
| @{mpat "PREFER_tag _"} => "t_prefer"
| @{mpat "DEFER_tag _"} => "t_defer"
| @{mpat "RPREM _"} => "t_rprem"
| @{mpat "hn_refine _ _ _ _ ?a"} => Pretty.block [Pretty.str "t_hnr: ",Pretty.brk 1, Syntax.pretty_term ctxt a] |> Pretty.string_of
| _ => "Unknown goal type"
)
| _ => "Non-Trueprop goal"
end
fun msg_analyze msg = Sepref_Debugging.msg_from_subgoal msg gen_msg_analyze
fun check_side_conds thm = let
open Sepref_Basic
fun is_atomic (Const (_,@{typ "assn⇒assn⇒assn"})$_$_) = false
| is_atomic _ = true
val is_atomic_star_list = ("Expected atoms separated by star",forall is_atomic o strip_star)
val is_trueprop = ("Expected Trueprop conclusion",can HOLogic.dest_Trueprop)
fun assert t' (msg,p) t = if p t then () else raise TERM(msg,[t',t])
fun chk_prem t = let
val assert = assert t
fun chk @{mpat "?l ∨⇩A ?r ⟹⇩t ?m"} = (
assert is_atomic_star_list l;
assert is_atomic_star_list r;
assert is_atomic_star_list m
)
| chk (t as @{mpat "_ ⟹⇩A _"}) = raise TERM("Invalid frame side condition (old-style ent)",[t])
| chk @{mpat "?l ⟹⇩t ?r"} = (
assert is_atomic_star_list l;
assert is_atomic_star_list r
)
| chk _ = ()
val t = Logic.strip_assums_concl t
in
assert is_trueprop t;
chk (HOLogic.dest_Trueprop t)
end
in
map chk_prem (Thm.prems_of thm)
end
structure sepref_comb_rules = Named_Sorted_Thms (
val name = @{binding "sepref_comb_rules"}
val description = "Sepref: Combinator rules"
val sort = K I
fun transform _ thm = let
val _ = check_side_conds thm
in
[thm]
end
)
structure sepref_fr_rules = Named_Sorted_Thms (
val name = @{binding "sepref_fr_rules"}
val description = "Sepref: Frame-based rules"
val sort = K I
fun transform context thm = let
val ctxt = Context.proof_of context
val thm = Sepref_Rules.ensure_hnr ctxt thm
|> Conv.fconv_rule (Sepref_Frame.align_rl_conv ctxt)
val _ = check_side_conds thm
val _ = case try (Sepref_Rules.analyze_hnr ctxt) thm of
NONE =>
(Pretty.block [
Pretty.str "hnr-analysis failed",
Pretty.str ":",
Pretty.brk 1,
Thm.pretty_thm ctxt thm])
|> Pretty.string_of |> error
| SOME ana => let
val _ = Sepref_Combinator_Setup.is_valid_abs_op ctxt (fst (#ahead ana))
orelse Pretty.block [
Pretty.str "Invalid abstract head:",
Pretty.brk 1,
Pretty.enclose "(" ")" [Syntax.pretty_term ctxt (fst (#ahead ana))],
Pretty.brk 1,
Pretty.str "in thm",
Pretty.brk 1,
Thm.pretty_thm ctxt thm
]
|> Pretty.string_of |> error
in () end
in
[thm]
end
)
local
open Sepref_Basic
in
fun side_unfold_tac ctxt = let
in
CONVERSION (Id_Op.unprotect_conv ctxt)
THEN' SELECT_GOAL (Local_Defs.unfold0_tac ctxt @{thms bind_ref_tag_def})
end
fun side_fallback_tac ctxt = side_unfold_tac ctxt THEN' TRADE (SELECT_GOAL o auto_tac) ctxt
val side_frame_tac = Sepref_Frame.frame_tac side_fallback_tac
val side_merge_tac = Sepref_Frame.merge_tac side_fallback_tac
fun side_constraint_tac ctxt = Sepref_Constraints.constraint_tac ctxt
fun side_mono_tac ctxt = side_unfold_tac ctxt THEN' TRADE Pf_Mono_Prover.mono_tac ctxt
fun side_gen_algo_tac ctxt =
side_unfold_tac ctxt
THEN' resolve_tac ctxt (Named_Theorems_Rev.get ctxt @{named_theorems_rev sepref_gen_algo_rules})
fun side_pref_def_tac ctxt =
side_unfold_tac ctxt THEN'
TRADE (fn ctxt =>
resolve_tac ctxt @{thms PREFER_tagI DEFER_tagI}
THEN' (Sepref_Debugging.warning_tac' "Obsolete PREFER/DEFER side condition" ctxt THEN' Tagged_Solver.solve_tac ctxt)
) ctxt
fun side_rprem_tac ctxt =
resolve_tac ctxt @{thms RPREMI} THEN' Refine_Util.rprems_tac ctxt
THEN' (K (smash_new_rule ctxt))
fun side_cond_dispatch_tac dbg hnr_tac ctxt = let
fun MK tac = if dbg then CHANGED o tac ctxt else SOLVED' (tac ctxt)
val t_merge = MK side_merge_tac
val t_frame = MK side_frame_tac
val t_indep = MK Indep_Vars.indep_tac
val t_constraint = MK side_constraint_tac
val t_mono = MK side_mono_tac
val t_pref_def = MK side_pref_def_tac
val t_rprem = MK side_rprem_tac
val t_gen_algo = side_gen_algo_tac ctxt
val t_fallback = MK side_fallback_tac
in
WITH_concl
(fn @{mpat "Trueprop ?t"} => (case t of
@{mpat "_ ∨⇩A _ ⟹⇩t _"} => t_merge
| @{mpat "_ ⟹⇩t _"} => t_frame
| @{mpat "_ ⟹⇩A _"} => Sepref_Debugging.warning_tac' "Old-style frame side condition" ctxt THEN' (K no_tac)
| @{mpat "INDEP _"} => t_indep
| @{mpat "CONSTRAINT _ _"} => t_constraint
| @{mpat "mono_Heap _"} => t_mono
| @{mpat "PREFER_tag _"} => t_pref_def
| @{mpat "DEFER_tag _"} => t_pref_def
| @{mpat "RPREM _"} => t_rprem
| @{mpat "GEN_ALGO _ _"} => t_gen_algo
| @{mpat "hn_refine _ _ _ _ _"} => hnr_tac
| _ => t_fallback
)
| _ => K no_tac
)
end
end
local
open Sepref_Basic STactical
in
fun trans_comb_tac ctxt = let
val comb_rl_net = sepref_comb_rules.get ctxt
|> Tactic.build_net
in
DETERM o (
resolve_from_net_tac ctxt comb_rl_net
ORELSE' (
Sepref_Frame.norm_goal_pre_tac ctxt
THEN' resolve_from_net_tac ctxt comb_rl_net
)
)
end
fun gen_trans_op_tac dbg ctxt = let
val fr_rl_net = sepref_fr_rules.get ctxt |> Tactic.build_net
val fr_rl_tac =
resolve_from_net_tac ctxt fr_rl_net
ORELSE' (
Sepref_Frame.norm_goal_pre_tac ctxt
THEN' (
resolve_from_net_tac ctxt fr_rl_net
ORELSE' (
resolve_tac ctxt @{thms cons_pre_rule}
THEN_ALL_NEW_LIST [
SOLVED' (REPEAT_ALL_NEW_FWD (DETERM o resolve_tac ctxt @{thms CPR_TAG_rules})),
K all_tac,
resolve_from_net_tac ctxt fr_rl_net
]
)
)
)
val side_tac = REPEAT_ALL_NEW_FWD (side_cond_dispatch_tac false (K no_tac) ctxt)
val fr_tac =
if dbg then
fr_rl_tac THEN_ALL_NEW_FWD (TRY o side_tac)
else
DETERM o SOLVED' (fr_rl_tac THEN_ALL_NEW_FWD (SOLVED' side_tac))
in
PHASES' [
("Align goal",Sepref_Frame.align_goal_tac, 0),
("Frame rule",fn ctxt => resolve_tac ctxt @{thms trans_frame_rule}, 1),
("Recover pure",Sepref_Frame.recover_pure_tac, ~1),
("Apply rule",K fr_tac,~1)
] (flag_phases_ctrl dbg) ctxt
end
fun gen_trans_step_tac dbg ctxt = side_cond_dispatch_tac dbg
(trans_comb_tac ctxt ORELSE' gen_trans_op_tac dbg ctxt)
ctxt
val trans_step_tac = gen_trans_step_tac false
val trans_step_keep_tac = gen_trans_step_tac true
fun gen_trans_tac dbg ctxt =
PHASES' [
("Translation steps",REPEAT_DETERM' o trans_step_tac,~1),
("Constraint solving",fn ctxt => fn _ => Sepref_Constraints.process_constraint_slot ctxt, 0)
] (flag_phases_ctrl dbg) ctxt
val trans_tac = gen_trans_tac false
val trans_keep_tac = gen_trans_tac true
end
val setup = I
#> sepref_fr_rules.setup
#> sepref_comb_rules.setup
end
›
setup Sepref_Translate.setup
subsubsection ‹Basic Setup›
lemma hn_pass[sepref_fr_rules]:
shows "hn_refine (hn_ctxt P x x') (return x') (hn_invalid P x x') P (PASS$x)"
apply rule apply (sep_auto simp: hn_ctxt_def invalidate_clone')
done
lemma hn_bind[sepref_comb_rules]:
assumes D1: "hn_refine Γ m' Γ1 Rh m"
assumes D2:
"⋀x x'. bind_ref_tag x m ⟹
hn_refine (Γ1 * hn_ctxt Rh x x') (f' x') (Γ2 x x') R (f x)"
assumes IMP: "⋀x x'. Γ2 x x' ⟹⇩t Γ' * hn_ctxt Rx x x'"
shows "hn_refine Γ (m'⤜f') Γ' R (Refine_Basic.bind$m$(λ⇩2x. f x))"
using assms
unfolding APP_def PROTECT2_def bind_ref_tag_def
by (rule hnr_bind)
lemma hn_RECT'[sepref_comb_rules]:
assumes "INDEP Ry" "INDEP Rx" "INDEP Rx'"
assumes FR: "P ⟹⇩t hn_ctxt Rx ax px * F"
assumes S: "⋀cf af ax px. ⟦
⋀ax px. hn_refine (hn_ctxt Rx ax px * F) (cf px) (hn_ctxt Rx' ax px * F) Ry
(RCALL$af$ax)⟧
⟹ hn_refine (hn_ctxt Rx ax px * F) (cB cf px) (F' ax px) Ry
(aB af ax)"
assumes FR': "⋀ax px. F' ax px ⟹⇩t hn_ctxt Rx' ax px * F"
assumes M: "(⋀x. mono_Heap (λf. cB f x))"
shows "hn_refine
(P) (heap.fixp_fun cB px) (hn_ctxt Rx' ax px * F) Ry
(RECT$(λ⇩2D x. aB D x)$ax)"
unfolding APP_def PROTECT2_def
apply (rule hn_refine_cons_pre[OF FR])
apply (rule hnr_RECT)
apply (rule hn_refine_cons_post[OF _ FR'])
apply (rule S[unfolded RCALL_def APP_def])
apply assumption
apply fact+
done
lemma hn_RCALL[sepref_comb_rules]:
assumes "RPREM (hn_refine P' c Q' R (RCALL $ a $ b))"
and "P ⟹⇩t F * P'"
shows "hn_refine P c (F * Q') R (RCALL $ a $ b)"
using assms hn_refine_frame[where m="RCALL$a$b"]
by simp
definition "monadic_WHILEIT I b f s ≡ do {
RECT (λD s. do {
ASSERT (I s);
bv ← b s;
if bv then do {
s ← f s;
D s
} else do {RETURN s}
}) s
}"
definition "heap_WHILET b f s ≡ do {
heap.fixp_fun (λD s. do {
bv ← b s;
if bv then do {
s ← f s;
D s
} else do {return s}
}) s
}"
lemma heap_WHILET_unfold[code]: "heap_WHILET b f s =
do {
bv ← b s;
if bv then do {
s ← f s;
heap_WHILET b f s
} else
return s
}"
unfolding heap_WHILET_def
apply (subst heap.mono_body_fixp)
apply pf_mono
apply simp
done
lemma WHILEIT_to_monadic: "WHILEIT I b f s = monadic_WHILEIT I (λs. RETURN (b s)) f s"
unfolding WHILEIT_def monadic_WHILEIT_def
unfolding WHILEI_body_def bind_ASSERT_eq_if
by (simp cong: if_cong)
lemma WHILEIT_pat[def_pat_rules]:
"WHILEIT$I ≡ UNPROTECT (WHILEIT I)"
"WHILET ≡ PR_CONST (WHILEIT (λ_. True))"
by (simp_all add: WHILET_def)
lemma id_WHILEIT[id_rules]:
"PR_CONST (WHILEIT I) ::⇩i TYPE(('a ⇒ bool) ⇒ ('a ⇒ 'a nres) ⇒ 'a ⇒ 'a nres)"
by simp
lemma WHILE_arities[sepref_monadify_arity]:
"PR_CONST (WHILEIT I) ≡ λ⇩2b f s. SP (PR_CONST (WHILEIT I))$(λ⇩2s. b$s)$(λ⇩2s. f$s)$s"
by (simp_all add: WHILET_def)
lemma WHILEIT_comb[sepref_monadify_comb]:
"PR_CONST (WHILEIT I)$(λ⇩2x. b x)$f$s ≡
Refine_Basic.bind$(EVAL$s)$(λ⇩2s.
SP (PR_CONST (monadic_WHILEIT I))$(λ⇩2x. (EVAL$(b x)))$f$s
)"
by (simp_all add: WHILEIT_to_monadic)
lemma hn_monadic_WHILE_aux:
assumes FR: "P ⟹⇩t Γ * hn_ctxt Rs s' s"
assumes b_ref: "⋀s s'. I s' ⟹ hn_refine
(Γ * hn_ctxt Rs s' s)
(b s)
(Γb s' s)
(pure bool_rel)
(b' s')"
assumes b_fr: "⋀s' s. Γb s' s ⟹⇩t Γ * hn_ctxt Rs s' s"
assumes f_ref: "⋀s' s. ⟦I s'⟧ ⟹ hn_refine
(Γ * hn_ctxt Rs s' s)
(f s)
(Γf s' s)
Rs
(f' s')"
assumes f_fr: "⋀s' s. Γf s' s ⟹⇩t Γ * hn_ctxt (λ_ _. true) s' s"
shows "hn_refine (P) (heap_WHILET b f s) (Γ * hn_invalid Rs s' s) Rs (monadic_WHILEIT I b' f' s')"
unfolding monadic_WHILEIT_def heap_WHILET_def
apply1 (rule hn_refine_cons_pre[OF FR])
apply weaken_hnr_post
focus (rule hn_refine_cons_pre[OF _ hnr_RECT])
applyS (subst mult_ac(2)[of Γ]; rule entt_refl; fail)
apply1 (rule hnr_ASSERT)
focus (rule hnr_bind)
focus (rule hn_refine_cons[OF _ b_ref b_fr entt_refl])
applyS (simp add: star_aci)
applyS assumption
solved
focus (rule hnr_If)
applyS (sep_auto; fail)
focus (rule hnr_bind)
focus (rule hn_refine_cons[OF _ f_ref f_fr entt_refl])
apply (sep_auto simp: hn_ctxt_def pure_def intro!: enttI; fail)
apply assumption
solved
focus (rule hn_refine_frame)
applyS rprems
applyS (rule enttI; solve_entails)
solved
apply (sep_auto intro!: enttI; fail)
solved
applyF (sep_auto,rule hn_refine_frame)
applyS (rule hnr_RETURN_pass)
apply (rule enttI)
apply (fr_rot_rhs 1)
apply (fr_rot 1, rule fr_refl)
apply (rule fr_refl)
apply solve_entails
solved
apply (rule entt_refl)
solved
apply (rule enttI)
applyF (rule ent_disjE)
apply1 (sep_auto simp: hn_ctxt_def pure_def)
apply1 (rule ent_true_drop)
apply1 (rule ent_true_drop)
applyS (rule ent_refl)
applyS (sep_auto simp: hn_ctxt_def pure_def)
solved
solved
apply pf_mono
solved
done
lemma hn_monadic_WHILE_lin[sepref_comb_rules]:
assumes "INDEP Rs"
assumes FR: "P ⟹⇩t Γ * hn_ctxt Rs s' s"
assumes b_ref: "⋀s s'. I s' ⟹ hn_refine
(Γ * hn_ctxt Rs s' s)
(b s)
(Γb s' s)
(pure bool_rel)
(b' s')"
assumes b_fr: "⋀s' s. TERM (monadic_WHILEIT,''cond'') ⟹ Γb s' s ⟹⇩t Γ * hn_ctxt Rs s' s"
assumes f_ref: "⋀s' s. I s' ⟹ hn_refine
(Γ * hn_ctxt Rs s' s)
(f s)
(Γf s' s)
Rs
(f' s')"
assumes f_fr: "⋀s' s. TERM (monadic_WHILEIT,''body'') ⟹ Γf s' s ⟹⇩t Γ * hn_ctxt (λ_ _. true) s' s"
shows "hn_refine
P
(heap_WHILET b f s)
(Γ * hn_invalid Rs s' s)
Rs
(PR_CONST (monadic_WHILEIT I)$(λ⇩2s'. b' s')$(λ⇩2s'. f' s')$(s'))"
using assms(2-)
unfolding APP_def PROTECT2_def CONSTRAINT_def PR_CONST_def
by (rule hn_monadic_WHILE_aux)
lemma monadic_WHILEIT_refine[refine]:
assumes [refine]: "(s',s) ∈ R"
assumes [refine]: "⋀s' s. ⟦ (s',s)∈R; I s ⟧ ⟹ I' s'"
assumes [refine]: "⋀s' s. ⟦ (s',s)∈R; I s; I' s' ⟧ ⟹ b' s' ≤⇓bool_rel (b s)"
assumes [refine]: "⋀s' s. ⟦ (s',s)∈R; I s; I' s'; nofail (b s); inres (b s) True ⟧ ⟹ f' s' ≤⇓R (f s)"
shows "monadic_WHILEIT I' b' f' s' ≤⇓R (monadic_WHILEIT I b f s)"
unfolding monadic_WHILEIT_def
by (refine_rcg bind_refine'; assumption?; auto)
lemma monadic_WHILEIT_refine_WHILEIT[refine]:
assumes [refine]: "(s',s) ∈ R"
assumes [refine]: "⋀s' s. ⟦ (s',s)∈R; I s ⟧ ⟹ I' s'"
assumes [THEN order_trans,refine_vcg]: "⋀s' s. ⟦ (s',s)∈R; I s; I' s' ⟧ ⟹ b' s' ≤ SPEC (λr. r = b s)"
assumes [refine]: "⋀s' s. ⟦ (s',s)∈R; I s; I' s'; b s ⟧ ⟹ f' s' ≤⇓R (f s)"
shows "monadic_WHILEIT I' b' f' s' ≤⇓R (WHILEIT I b f s)"
unfolding WHILEIT_to_monadic
by (refine_vcg; assumption?; auto)
lemma monadic_WHILEIT_refine_WHILET[refine]:
assumes [refine]: "(s',s) ∈ R"
assumes [THEN order_trans,refine_vcg]: "⋀s' s. ⟦ (s',s)∈R ⟧ ⟹ b' s' ≤ SPEC (λr. r = b s)"
assumes [refine]: "⋀s' s. ⟦ (s',s)∈R; b s ⟧ ⟹ f' s' ≤⇓R (f s)"
shows "monadic_WHILEIT (λ_. True) b' f' s' ≤⇓R (WHILET b f s)"
unfolding WHILET_def
by (refine_vcg; assumption?)
lemma monadic_WHILEIT_pat[def_pat_rules]:
"monadic_WHILEIT$I ≡ UNPROTECT (monadic_WHILEIT I)"
by auto
lemma id_monadic_WHILEIT[id_rules]:
"PR_CONST (monadic_WHILEIT I) ::⇩i TYPE(('a ⇒ bool nres) ⇒ ('a ⇒ 'a nres) ⇒ 'a ⇒ 'a nres)"
by simp
lemma monadic_WHILEIT_arities[sepref_monadify_arity]:
"PR_CONST (monadic_WHILEIT I) ≡ λ⇩2b f s. SP (PR_CONST (monadic_WHILEIT I))$(λ⇩2s. b$s)$(λ⇩2s. f$s)$s"
by (simp)
lemma monadic_WHILEIT_comb[sepref_monadify_comb]:
"PR_CONST (monadic_WHILEIT I)$b$f$s ≡
Refine_Basic.bind$(EVAL$s)$(λ⇩2s.
SP (PR_CONST (monadic_WHILEIT I))$b$f$s
)"
by (simp)
definition [simp]: "op_ASSERT_bind I m ≡ Refine_Basic.bind (ASSERT I) (λ_. m)"
lemma pat_ASSERT_bind[def_pat_rules]:
"Refine_Basic.bind$(ASSERT$I)$(λ⇩2_. m) ≡ UNPROTECT (op_ASSERT_bind I)$m"
by simp
term "PR_CONST (op_ASSERT_bind I)"
lemma id_op_ASSERT_bind[id_rules]:
"PR_CONST (op_ASSERT_bind I) ::⇩i TYPE('a nres ⇒ 'a nres)"
by simp
lemma arity_ASSERT_bind[sepref_monadify_arity]:
"PR_CONST (op_ASSERT_bind I) ≡ λ⇩2m. SP (PR_CONST (op_ASSERT_bind I))$m"
apply (rule eq_reflection)
by auto
lemma hn_ASSERT_bind[sepref_comb_rules]:
assumes "I ⟹ hn_refine Γ c Γ' R m"
shows "hn_refine Γ c Γ' R (PR_CONST (op_ASSERT_bind I)$m)"
using assms
apply (cases I)
apply auto
done
definition [simp]: "op_ASSUME_bind I m ≡ Refine_Basic.bind (ASSUME I) (λ_. m)"
lemma pat_ASSUME_bind[def_pat_rules]:
"Refine_Basic.bind$(ASSUME$I)$(λ⇩2_. m) ≡ UNPROTECT (op_ASSUME_bind I)$m"
by simp
lemma id_op_ASSUME_bind[id_rules]:
"PR_CONST (op_ASSUME_bind I) ::⇩i TYPE('a nres ⇒ 'a nres)"
by simp
lemma arity_ASSUME_bind[sepref_monadify_arity]:
"PR_CONST (op_ASSUME_bind I) ≡ λ⇩2m. SP (PR_CONST (op_ASSUME_bind I))$m"
apply (rule eq_reflection)
by auto
lemma hn_ASSUME_bind[sepref_comb_rules]:
assumes "vassn_tag Γ ⟹ I"
assumes "I ⟹ hn_refine Γ c Γ' R m"
shows "hn_refine Γ c Γ' R (PR_CONST (op_ASSUME_bind I)$m)"
apply (rule hn_refine_preI)
using assms
apply (cases I)
apply (auto simp: vassn_tag_def)
done
subsection "Import of Parametricity Theorems"
lemma pure_hn_refineI:
assumes "Q ⟶ (c,a)∈R"
shows "hn_refine (↑Q) (return c) (↑Q) (pure R) (RETURN a)"
unfolding hn_refine_def using assms
by (sep_auto simp: pure_def)
lemma pure_hn_refineI_no_asm:
assumes "(c,a)∈R"
shows "hn_refine emp (return c) emp (pure R) (RETURN a)"
unfolding hn_refine_def using assms
by (sep_auto simp: pure_def)
lemma import_param_0:
"(P⟹Q) ≡ Trueprop (PROTECT P ⟶ Q)"
apply (rule, simp+)+
done
lemma import_param_1:
"(P⟹Q) ≡ Trueprop (P⟶Q)"
"(P⟶Q⟶R) ⟷ (P∧Q ⟶ R)"
"PROTECT (P ∧ Q) ≡ PROTECT P ∧ PROTECT Q"
"(P ∧ Q) ∧ R ≡ P ∧ Q ∧ R"
"(a,c)∈Rel ∧ PROTECT P ⟷ PROTECT P ∧ (a,c)∈Rel"
apply (rule, simp+)+
done
lemma import_param_2:
"Trueprop (PROTECT P ∧ Q ⟶ R) ≡ (P ⟹ Q⟶R)"
apply (rule, simp+)+
done
lemma import_param_3:
"↑(P ∧ Q) = ↑P*↑Q"
"↑((c,a)∈R) = hn_val R a c"
by (simp_all add: hn_ctxt_def pure_def)
named_theorems_rev sepref_import_rewrite ‹Rewrite rules on importing parametricity theorems›
lemma to_import_frefD:
assumes "(f,g)∈fref P R S"
shows "⟦PROTECT (P y); (x,y)∈R⟧ ⟹ (f x, g y)∈S"
using assms
unfolding fref_def
by auto
lemma add_PR_CONST: "(c,a)∈R ⟹ (c,PR_CONST a)∈R" by simp
ML ‹
structure Sepref_Import_Param = struct
fun to_import_fo ctxt thm = let
val unf_thms = @{thms
split_tupled_all prod_rel_simp uncurry_apply cnv_conj_to_meta Product_Type.split}
in
case Thm.concl_of thm of
@{mpat "Trueprop ((_,_) ∈ fref _ _ _)"} =>
(@{thm to_import_frefD} OF [thm])
|> forall_intr_vars
|> Local_Defs.unfold0 ctxt unf_thms
|> Variable.gen_all ctxt
| @{mpat "Trueprop ((_,_) ∈ _)"} =>
Parametricity.fo_rule thm
| _ => raise THM("Expected parametricity or fref theorem",~1,[thm])
end
fun add_PR_CONST thm = case Thm.concl_of thm of
@{mpat "Trueprop ((_,_) ∈ fref _ _ _)"} => thm
| @{mpat "Trueprop ((_,PR_CONST _) ∈ _)"} => thm
| @{mpat "Trueprop ((_,?a) ∈ _)"} => if is_Const a orelse is_Free a orelse is_Var a then
thm
else
thm RS @{thm add_PR_CONST}
| _ => thm
fun import ctxt thm = let
open Sepref_Basic
val thm = thm
|> Conv.fconv_rule Thm.eta_conversion
|> add_PR_CONST
|> Local_Defs.unfold0 ctxt @{thms import_param_0}
|> Local_Defs.unfold0 ctxt @{thms imp_to_meta}
|> to_import_fo ctxt
|> Local_Defs.unfold0 ctxt @{thms import_param_1}
|> Local_Defs.unfold0 ctxt @{thms import_param_2}
val thm = case Thm.concl_of thm of
@{mpat "Trueprop (_⟶_)"} => thm RS @{thm pure_hn_refineI}
| _ => thm RS @{thm pure_hn_refineI_no_asm}
val thm = Local_Defs.unfold0 ctxt @{thms import_param_3} thm
|> Conv.fconv_rule (hn_refine_concl_conv_a (K (Id_Op.protect_conv ctxt)) ctxt)
val thm = Local_Defs.unfold0 ctxt (Named_Theorems_Rev.get ctxt @{named_theorems_rev sepref_import_rewrite}) thm
val thm = Sepref_Rules.add_pure_constraints_rule ctxt thm
in
thm
end
val import_attr = Scan.succeed (Thm.mixed_attribute (fn (context,thm) =>
let
val thm = import (Context.proof_of context) thm
val context = Sepref_Translate.sepref_fr_rules.add_thm thm context
in (context,thm) end
))
val import_attr_rl = Scan.succeed (Thm.rule_attribute [] (fn context =>
import (Context.proof_of context) #> Sepref_Rules.ensure_hfref (Context.proof_of context)
))
val setup = I
#> Attrib.setup @{binding sepref_import_param} import_attr
"Sepref: Import parametricity rule"
#> Attrib.setup @{binding sepref_param} import_attr_rl
"Sepref: Transform parametricity rule to sepref rule"
#> Attrib.setup @{binding sepref_dbg_import_rl_only}
(Scan.succeed (Thm.rule_attribute [] (import o Context.proof_of)))
"Sepref: Parametricity to hnr-rule, no conversion to hfref"
end
›
setup Sepref_Import_Param.setup
subsection "Purity"
definition "import_rel1 R ≡ λA c ci. ↑(is_pure A ∧ (ci,c)∈⟨the_pure A⟩R)"
definition "import_rel2 R ≡ λA B c ci. ↑(is_pure A ∧ is_pure B ∧ (ci,c)∈⟨the_pure A, the_pure B⟩R)"
lemma import_rel1_pure_conv: "import_rel1 R (pure A) = pure (⟨A⟩R)"
unfolding import_rel1_def
apply simp
apply (simp add: pure_def)
done
lemma import_rel2_pure_conv: "import_rel2 R (pure A) (pure B) = pure (⟨A,B⟩R)"
unfolding import_rel2_def
apply simp
apply (simp add: pure_def)
done
lemma precise_pure[constraint_rules]: "single_valued R ⟹ precise (pure R)"
unfolding precise_def pure_def
by (auto dest: single_valuedD)
lemma precise_pure_iff_sv: "precise (pure R) ⟷ single_valued R"
apply (auto simp: precise_pure)
using preciseD[where R="pure R" and F=emp and F'=emp]
by (sep_auto simp: mod_and_dist intro: single_valuedI)
lemma pure_precise_iff_sv: "⟦is_pure R⟧
⟹ precise R ⟷ single_valued (the_pure R)"
by (auto simp: is_pure_conv precise_pure_iff_sv)
lemmas [safe_constraint_rules] = single_valued_Id br_sv
end
Theory Sepref_Definition
section ‹Sepref-Definition Command›
theory Sepref_Definition
imports Sepref_Rules "Lib/Pf_Mono_Prover" "Lib/Term_Synth"
keywords "sepref_definition" :: thy_goal
and "sepref_thm" :: thy_goal
begin
subsection ‹Setup of Extraction-Tools›
declare [[cd_patterns "hn_refine _ ?f _ _ _"]]
lemma heap_fixp_codegen:
assumes DEF: "f ≡ heap.fixp_fun cB"
assumes M: "(⋀x. mono_Heap (λf. cB f x))"
shows "f x = cB f x"
unfolding DEF
apply (rule fun_cong[of _ _ x])
apply (rule heap.mono_body_fixp)
apply fact
done
ML ‹
structure Sepref_Extraction = struct
val heap_extraction: Refine_Automation.extraction = {
pattern = Logic.varify_global @{term "heap.fixp_fun x"},
gen_thm = @{thm heap_fixp_codegen},
gen_tac = (fn ctxt =>
Pf_Mono_Prover.mono_tac ctxt
)
}
val setup = I
#> Refine_Automation.add_extraction "heap" heap_extraction
end
›
setup Sepref_Extraction.setup
subsection ‹Synthesis setup for sepref-definition goals›
consts UNSPEC::'a
abbreviation hfunspec
:: "('a ⇒ 'b ⇒ assn) ⇒ ('a ⇒ 'b ⇒ assn)×('a ⇒ 'b ⇒ assn)"
("(_⇧?)" [1000] 999)
where "R⇧? ≡ hf_pres R UNSPEC"
definition SYNTH :: "('a ⇒ 'r nres) ⇒ (('ai ⇒'ri Heap) × ('a ⇒ 'r nres)) set ⇒ bool"
where "SYNTH f R ≡ True"
definition [simp]: "CP_UNCURRY _ _ ≡ True"
definition [simp]: "INTRO_KD _ _ ≡ True"
definition [simp]: "SPEC_RES_ASSN _ _ ≡ True"
lemma [synth_rules]: "CP_UNCURRY f g" by simp
lemma [synth_rules]: "CP_UNCURRY (uncurry0 f) (uncurry0 g)" by simp
lemma [synth_rules]: "CP_UNCURRY f g ⟹ CP_UNCURRY (uncurry f) (uncurry g)" by simp
lemma [synth_rules]: "⟦INTRO_KD R1 R1'; INTRO_KD R2 R2'⟧ ⟹ INTRO_KD (R1*⇩aR2) (R1'*⇩aR2')" by simp
lemma [synth_rules]: "INTRO_KD (R⇧?) (hf_pres R k)" by simp
lemma [synth_rules]: "INTRO_KD (R⇧k) (R⇧k)" by simp
lemma [synth_rules]: "INTRO_KD (R⇧d) (R⇧d)" by simp
lemma [synth_rules]: "SPEC_RES_ASSN R R" by simp
lemma [synth_rules]: "SPEC_RES_ASSN UNSPEC R" by simp
lemma synth_hnrI:
"⟦CP_UNCURRY fi f; INTRO_KD R R'; SPEC_RES_ASSN S S'⟧ ⟹ SYNTH_TERM (SYNTH f ([P]⇩a R→S)) ((fi,SDUMMY)∈SDUMMY,(fi,f)∈([P]⇩a R'→S'))"
by (simp add: SYNTH_def)
term starts_with
ML ‹
structure Sepref_Definition = struct
fun make_hnr_goal t ctxt = let
val ctxt = Variable.declare_term t ctxt
val (pat,goal) = case Term_Synth.synth_term @{thms synth_hnrI} ctxt t of
@{mpat "(?pat,?goal)"} => (pat,goal) | t => raise TERM("Synthesized term does not match",[t])
val pat = Thm.cterm_of ctxt pat |> Refine_Automation.prepare_cd_pattern ctxt
val goal = HOLogic.mk_Trueprop goal
in
((pat,goal),ctxt)
end
val cfg_prep_code = Attrib.setup_config_bool @{binding sepref_definition_prep_code} (K true)
local
open Refine_Util
val flags = parse_bool_config' "prep_code" cfg_prep_code
val parse_flags = parse_paren_list' flags
in
val sd_parser = parse_flags -- Parse.binding -- Parse.opt_attribs --| @{keyword "is"}
-- Parse.term --| @{keyword "::"} -- Parse.term
end
fun mk_synth_term ctxt t_raw r_raw = let
val t = Syntax.parse_term ctxt t_raw
val r = Syntax.parse_term ctxt r_raw
val t = Const (@{const_name SYNTH},dummyT)$t$r
in
Syntax.check_term ctxt t
end
fun sd_cmd ((((flags,name),attribs),t_raw),r_raw) lthy = let
local
val ctxt = Refine_Util.apply_configs flags lthy
in
val flag_prep_code = Config.get ctxt cfg_prep_code
end
val t = mk_synth_term lthy t_raw r_raw
val ((pat,goal),ctxt) = make_hnr_goal t lthy
fun
after_qed [[thm]] ctxt = let
val thm = singleton (Variable.export ctxt lthy) thm
val (_,lthy)
= Local_Theory.note
((Refine_Automation.mk_qualified (Binding.name_of name) "refine_raw",[]),[thm])
lthy;
val ((dthm,rthm),lthy) = Refine_Automation.define_concrete_fun NONE name attribs [] thm [pat] lthy
val lthy = lthy
|> flag_prep_code ? Refine_Automation.extract_recursion_eqs
[Sepref_Extraction.heap_extraction] (Binding.name_of name) dthm
val _ = Thm.pretty_thm lthy dthm |> Pretty.string_of |> writeln
val _ = Thm.pretty_thm lthy rthm |> Pretty.string_of |> writeln
in
lthy
end
| after_qed thmss _ = raise THM ("After-qed: Wrong thmss structure",~1,flat thmss)
in
Proof.theorem NONE after_qed [[ (goal,[]) ]] ctxt
end
val _ = Outer_Syntax.local_theory_to_proof @{command_keyword "sepref_definition"}
"Synthesis of imperative program"
(sd_parser >> sd_cmd)
val st_parser = Parse.binding --| @{keyword "is"} -- Parse.term --| @{keyword "::"} -- Parse.term
fun st_cmd ((name,t_raw),r_raw) lthy = let
val t = mk_synth_term lthy t_raw r_raw
val ((_,goal),ctxt) = make_hnr_goal t lthy
fun
after_qed [[thm]] ctxt = let
val thm = singleton (Variable.export ctxt lthy) thm
val _ = Thm.pretty_thm lthy thm |> Pretty.string_of |> tracing
val (_,lthy)
= Local_Theory.note
((Refine_Automation.mk_qualified (Binding.name_of name) "refine_raw",[]),[thm])
lthy;
in
lthy
end
| after_qed thmss _ = raise THM ("After-qed: Wrong thmss structure",~1,flat thmss)
in
Proof.theorem NONE after_qed [[ (goal,[]) ]] ctxt
end
val _ = Outer_Syntax.local_theory_to_proof @{command_keyword "sepref_thm"}
"Synthesis of imperative program: Only generate raw refinement theorem"
(st_parser >> st_cmd)
end
›
end
Theory Sepref_Intf_Util
section ‹Utilities for Interface Specifications and Implementations›
theory Sepref_Intf_Util
imports Sepref_Rules Sepref_Translate "Lib/Term_Synth" Sepref_Combinator_Setup
"Lib/Concl_Pres_Clarification"
keywords "sepref_decl_op" :: thy_goal
and "sepref_decl_impl" :: thy_goal
begin
subsection ‹Relation Interface Binding›
definition INTF_OF_REL :: "('a×'b) set ⇒ 'c itself ⇒ bool"
where [simp]: "INTF_OF_REL R I ≡ True"
lemma intf_of_relI: "INTF_OF_REL (R::(_×'a) set) TYPE('a)" by simp
declare intf_of_relI[synth_rules]
lemma [synth_rules]:
"INTF_OF_REL unit_rel TYPE(unit)"
"INTF_OF_REL nat_rel TYPE(nat)"
"INTF_OF_REL int_rel TYPE(int)"
"INTF_OF_REL bool_rel TYPE(bool)"
"INTF_OF_REL R TYPE('a) ⟹ INTF_OF_REL (⟨R⟩option_rel) TYPE('a option)"
"INTF_OF_REL R TYPE('a) ⟹ INTF_OF_REL (⟨R⟩list_rel) TYPE('a list)"
"INTF_OF_REL R TYPE('a) ⟹ INTF_OF_REL (⟨R⟩nres_rel) TYPE('a nres)"
"⟦INTF_OF_REL R TYPE('a); INTF_OF_REL S TYPE('b)⟧ ⟹ INTF_OF_REL (R×⇩rS) TYPE('a×'b)"
"⟦INTF_OF_REL R TYPE('a); INTF_OF_REL S TYPE('b)⟧ ⟹ INTF_OF_REL (⟨R,S⟩sum_rel) TYPE('a+'b)"
"⟦INTF_OF_REL R TYPE('a); INTF_OF_REL S TYPE('b)⟧ ⟹ INTF_OF_REL (R→S) TYPE('a⇒'b)"
by simp_all
lemma synth_intf_of_relI: "INTF_OF_REL R I ⟹ SYNTH_TERM R I" by simp
subsection ‹Operations with Precondition›
definition mop :: "('a⇒bool) ⇒ ('a⇒'b nres) ⇒ 'a ⇒ 'b nres"
where [simp]: "mop P f ≡ λx. ASSERT (P x) ⪢ f x"
lemma param_op_mop_iff:
assumes "(Q,P)∈R→bool_rel"
shows
"(f, g) ∈ [P]⇩f R → ⟨S⟩nres_rel
⟷
(mop Q f, mop P g) ∈ R →⇩f ⟨S⟩nres_rel
"
using assms
by (auto
simp: mop_def fref_def pw_nres_rel_iff refine_pw_simps
dest: fun_relD)
lemma param_mopI:
assumes "(f,g) ∈ [P]⇩f R → ⟨S⟩nres_rel"
assumes "(Q,P) ∈ R → bool_rel"
shows "(mop Q f, mop P g) ∈ R →⇩f ⟨S⟩nres_rel"
using assms by (simp add: param_op_mop_iff)
lemma mop_spec_rl: "P x ⟹ mop P f x ≤ f x" by simp
lemma mop_spec_rl_from_def:
assumes "f ≡ mop P g"
assumes "P x"
assumes "g x ≤ z"
shows "f x ≤ z"
using assms mop_spec_rl by simp
lemma mop_leof_rl_from_def:
assumes "f ≡ mop P g"
assumes "P x ⟹ g x ≤⇩n z"
shows "f x ≤⇩n z"
using assms
by (simp add: pw_leof_iff refine_pw_simps)
lemma assert_true_bind_conv: "ASSERT True ⪢ m = m" by simp
lemmas mop_alt_unfolds = curry_def curry0_def mop_def uncurry_apply uncurry0_apply o_apply assert_true_bind_conv
subsection ‹Constraints›
lemma add_is_pure_constraint: "⟦PROP P; CONSTRAINT is_pure A⟧ ⟹ PROP P" .
lemma sepref_relpropI: "P R = CONSTRAINT P R" by simp
subsubsection ‹Purity›
lemmas [constraint_simps] = the_pure_pure
definition [constraint_abbrevs]: "IS_PURE P R ≡ is_pure R ∧ P (the_pure R)"
lemma IS_PURE_pureI:
"P R ⟹ IS_PURE P (pure R)"
by (auto simp: IS_PURE_def)
lemma [fcomp_norm_simps]: "CONSTRAINT (IS_PURE Φ) P ⟹ pure (the_pure P) = P"
by (simp add: IS_PURE_def)
lemma [fcomp_norm_simps]: "CONSTRAINT (IS_PURE P) A ⟹ P (the_pure A)"
by (auto simp: IS_PURE_def)
lemma handle_purity1:
"CONSTRAINT (IS_PURE Φ) A ⟹ CONSTRAINT Φ (the_pure A)"
by (auto simp: IS_PURE_def)
lemma handle_purity2:
"CONSTRAINT (IS_PURE Φ) A ⟹ CONSTRAINT is_pure A"
by (auto simp: IS_PURE_def)
subsection ‹Composition›
subsubsection ‹Preconditions›
definition [simp]: "tcomp_pre Q T P ≡ λa. Q a ∧ (∀a'. (a', a) ∈ T ⟶ P a')"
definition "and_pre P1 P2 ≡ λx. P1 x ∧ P2 x"
definition "imp_pre P1 P2 ≡ λx. P1 x ⟶ P2 x"
lemma and_pre_beta: "PP ⟶ P x ∧ Q x ⟹ PP ⟶ and_pre P Q x" by (auto simp: and_pre_def)
lemma imp_pre_beta: "PP ⟶ P x ⟶ Q x ⟹ PP ⟶ imp_pre P Q x" by (auto simp: imp_pre_def)
definition "IMP_PRE P1 P2 ≡ ∀x. P1 x ⟶ P2 x"
lemma IMP_PRED: "IMP_PRE P1 P2 ⟹ P1 x ⟹ P2 x" unfolding IMP_PRE_def by auto
lemma IMP_PRE_refl: "IMP_PRE P P" unfolding IMP_PRE_def by auto
definition "IMP_PRE_CUSTOM ≡ IMP_PRE"
lemma IMP_PRE_CUSTOMD: "IMP_PRE_CUSTOM P1 P2 ⟹ IMP_PRE P1 P2" by (simp add: IMP_PRE_CUSTOM_def)
lemma IMP_PRE_CUSTOMI: "⟦⋀x. P1 x ⟹ P2 x⟧ ⟹ IMP_PRE_CUSTOM P1 P2"
by (simp add: IMP_PRE_CUSTOM_def IMP_PRE_def)
lemma imp_and_triv_pre: "IMP_PRE P (and_pre (λ_. True) P)"
unfolding IMP_PRE_def and_pre_def by auto
subsubsection ‹Premises›
definition "ALL_LIST A ≡ (∀x∈set A. x)"
definition "IMP_LIST A B ≡ ALL_LIST A ⟶ B"
lemma to_IMP_LISTI:
"P ⟹ IMP_LIST [] P"
by (auto simp: IMP_LIST_def)
lemma to_IMP_LIST: "(P ⟹ IMP_LIST Ps Q) ≡ Trueprop (IMP_LIST (P#Ps) Q)"
by (auto simp: IMP_LIST_def ALL_LIST_def intro!: equal_intr_rule)
lemma from_IMP_LIST:
"Trueprop (IMP_LIST As B) ≡ (ALL_LIST As ⟹ B)"
"(ALL_LIST [] ⟹ B) ≡ Trueprop B"
"(ALL_LIST (A#As) ⟹ B) ≡ (A ⟹ ALL_LIST As ⟹ B)"
by (auto simp: IMP_LIST_def ALL_LIST_def intro!: equal_intr_rule)
lemma IMP_LIST_trivial: "IMP_LIST A B ⟹ IMP_LIST A B" .
subsubsection ‹Composition Rules›
lemma hfcomp_tcomp_pre:
assumes B: "(g,h) ∈ [Q]⇩f T → ⟨U⟩nres_rel"
assumes A: "(f,g) ∈ [P]⇩a RR' → S"
shows "(f,h) ∈ [tcomp_pre Q T P]⇩a hrp_comp RR' T → hr_comp S U"
using hfcomp[OF A B] by simp
lemma transform_pre_param:
assumes A: "IMP_LIST Cns ((f, h) ∈ [tcomp_pre Q T P]⇩a hrp_comp RR' T → hr_comp S U)"
assumes P: "IMP_LIST Cns ((P,P') ∈ T → bool_rel)"
assumes C: "IMP_PRE PP' (and_pre P' Q)"
shows "IMP_LIST Cns ((f,h) ∈ [PP']⇩a hrp_comp RR' T → hr_comp S U)"
unfolding from_IMP_LIST
apply (rule hfref_cons)
apply (rule A[unfolded from_IMP_LIST])
apply assumption
apply (drule IMP_PRED[OF C])
using P[unfolded from_IMP_LIST] unfolding and_pre_def
apply (auto dest: fun_relD) []
by simp_all
lemma hfref_mop_conv: "((g,mop P f) ∈ [Q]⇩a R → S) ⟷ (g,f) ∈ [λx. P x ∧ Q x]⇩a R → S"
apply (simp add: hfref_to_ASSERT_conv)
apply (fo_rule arg_cong fun_cong)+
by (auto intro!: ext simp: pw_eq_iff refine_pw_simps)
lemma hfref_op_to_mop:
assumes R: "(impl,f) ∈ [Q]⇩a R → S"
assumes DEF: "mf ≡ mop P f"
assumes C: "IMP_PRE PP' (imp_pre P Q)"
shows "(impl,mf) ∈ [PP']⇩a R → S"
unfolding DEF hfref_mop_conv
apply (rule hfref_cons[OF R])
using C
by (auto simp: IMP_PRE_def imp_pre_def)
lemma hfref_mop_to_op:
assumes R: "(impl,mf) ∈ [Q]⇩a R → S"
assumes DEF: "mf ≡ mop P f"
assumes C: "IMP_PRE PP' (and_pre Q P)"
shows "(impl,f) ∈ [PP']⇩a R → S"
using R unfolding DEF hfref_mop_conv
apply (rule hfref_cons)
using C
apply (auto simp: and_pre_def IMP_PRE_def)
done
subsubsection ‹Precondition Simplification›
lemma IMP_PRE_eqI:
assumes "⋀x. P x ⟶ Q x"
assumes "CNV P P'"
shows "IMP_PRE P' Q"
using assms by (auto simp: IMP_PRE_def)
lemma simp_and1:
assumes "Q ⟹ CNV P P'"
assumes "PP ⟶ P' ∧ Q"
shows "PP ⟶ P ∧ Q"
using assms by auto
lemma simp_and2:
assumes "P ⟹ CNV Q Q'"
assumes "PP ⟶ P ∧ Q'"
shows "PP ⟶ P ∧ Q"
using assms by auto
lemma triv_and1: "Q ⟶ True ∧ Q" by blast
lemma simp_imp:
assumes "P ⟹ CNV Q Q'"
assumes "PP ⟶ Q'"
shows "PP ⟶ (P ⟶ Q)"
using assms by auto
lemma CNV_split:
assumes "CNV A A'"
assumes "CNV B B'"
shows "CNV (A ∧ B) (A' ∧ B')"
using assms by auto
lemma CNV_prove:
assumes "P"
shows "CNV P True"
using assms by auto
lemma simp_pre_final_simp:
assumes "CNV P P'"
shows "P' ⟶ P"
using assms by auto
lemma auto_weaken_pre_uncurry_step':
assumes "PROTECT f a ≡ f'"
shows "PROTECT (uncurry f) (a,b) ≡ f' b"
using assms
by (auto simp: curry_def dest!: meta_eq_to_obj_eq intro!: eq_reflection)
subsection ‹Protected Constants›
lemma add_PR_CONST_to_def: "x≡y ⟹ PR_CONST x ≡ y" by simp
subsection ‹Rule Collections›
named_theorems_rev sepref_mop_def_thms ‹Sepref: mop - definition theorems›
named_theorems_rev sepref_fref_thms ‹Sepref: fref-theorems›
named_theorems sepref_relprops_transform ‹Sepref: Simp-rules to transform relator properties›
named_theorems sepref_relprops ‹Sepref: Simp-rules to add CONSTRAINT-tags to relator properties›
named_theorems sepref_relprops_simps ‹Sepref: Simp-rules to simplify relator properties›
subsubsection ‹Default Setup›
subsection ‹ML-Level Declarations›
ML ‹
signature SEPREF_INTF_UTIL = sig
val list_filtered_subterms: (term -> 'a option) -> term -> 'a list
val get_intf_of_rel: Proof.context -> term -> typ
val to_assns_rl: bool -> Proof.context -> thm -> thm
val cleanup_constraints: Proof.context -> thm -> thm
val simp_precond_tac: Proof.context -> tactic'
val cfg_def: bool Config.T
val cfg_ismop: bool Config.T
val cfg_mop: bool Config.T
val cfg_rawgoals: bool Config.T
end
structure Sepref_Intf_Util: SEPREF_INTF_UTIL = struct
val cfg_debug =
Attrib.setup_config_bool @{binding sepref_debug_intf_util} (K false)
val dbg_trace = Sepref_Debugging.dbg_trace_msg cfg_debug
val dbg_msg_tac = Sepref_Debugging.dbg_msg_tac cfg_debug
fun list_filtered_subterms f t = let
fun r t = case f t of
SOME a => [a]
| NONE => (
case t of
t1$t2 => r t1 @ r t2
| Abs (_,_,t) => r t
| _ => []
)
in
r t
end
fun get_intf_of_rel ctxt R =
Term_Synth.synth_term @{thms synth_intf_of_relI} ctxt R
|> fastype_of
|> Refine_Util.dest_itselfT
local
fun add_is_pure_constraint ctxt v thm = let
val v = Thm.cterm_of ctxt v
val rl = Drule.infer_instantiate' ctxt [NONE, SOME v] @{thm add_is_pure_constraint}
in
thm RS rl
end
in
fun to_assns_rl add_pure_constr ctxt thm = let
val orig_ctxt = ctxt
val (thm,ctxt) = yield_singleton (apfst snd oo Variable.importT) thm ctxt
val (R,S) = case Thm.concl_of thm of @{mpat "Trueprop (_∈fref _ ?R ?S)"} => (R,S)
| _ => raise THM("to_assns_rl: expected fref-thm",~1,[thm])
fun mk_cn_subst (fname,(iname,C,A)) =
let
val T' = A --> C --> @{typ assn}
val v' = Free (fname,T')
val ct' = @{mk_term "the_pure ?v'"} |> Thm.cterm_of ctxt
in
(v',(iname,ct'))
end
fun relation_flt (name,Type (@{type_name set},[Type (@{type_name prod},[C,A])])) = SOME (name,C,A)
| relation_flt _ = NONE
val vars = []
|> Term.add_vars R
|> Term.add_vars S
|> map_filter (relation_flt)
val (names,ctxt) = Variable.variant_fixes (map (#1 #> fst) vars) ctxt
val cn_substs = map mk_cn_subst (names ~~ vars)
val thm = Drule.infer_instantiate ctxt (map snd cn_substs) thm
val thm = thm |> add_pure_constr ? fold (fn (v,_) => fn thm => add_is_pure_constraint ctxt v thm) cn_substs
val thm = singleton (Variable.export ctxt orig_ctxt) thm
in
thm
end
fun cleanup_constraints ctxt thm = let
val orig_ctxt = ctxt
val (thm, ctxt) = yield_singleton (apfst snd oo Variable.import true) thm ctxt
val xform_thms = Named_Theorems.get ctxt @{named_theorems sepref_relprops_transform}
val rprops_thms = Named_Theorems.get ctxt @{named_theorems sepref_relprops}
val simp_thms = Named_Theorems.get ctxt @{named_theorems sepref_relprops_simps}
fun simp thms = Conv.fconv_rule (
Simplifier.asm_full_rewrite
(put_simpset HOL_basic_ss ctxt addsimps thms))
local
val (_,R,S) = case Thm.concl_of thm of
@{mpat "Trueprop (_∈hfref ?P ?R ?S)"} => (P,R,S)
| @{mpat "Trueprop (_∈fref ?P ?R ?S)"} => (P,R,S)
| _ => raise THM("cleanup_constraints: Expected hfref or fref-theorem",~1,[thm])
fun flt_pat @{mpat "pure (the_pure ?A)"} = SOME A | flt_pat _ = NONE
val purify_terms =
(list_filtered_subterms flt_pat R @ list_filtered_subterms flt_pat S)
|> distinct op aconv
val thm = fold (add_is_pure_constraint ctxt) purify_terms thm
in
val thm = thm
end
val thm = thm
|> Local_Defs.unfold0 ctxt xform_thms
|> Local_Defs.unfold0 ctxt rprops_thms
val insts = map (fn
@{mpat "Trueprop (CONSTRAINT _ (the_pure _))"} => @{thm handle_purity1}
| _ => asm_rl
) (Thm.prems_of thm)
val thm = (thm OF insts)
|> Conv.fconv_rule Thm.eta_conversion
|> simp @{thms handle_purity2}
|> simp simp_thms
val thm = singleton (Variable.export ctxt orig_ctxt) thm
in
thm
end
end
fun simp_precond_tac ctxt = let
fun simp_only thms = asm_full_simp_tac (put_simpset HOL_basic_ss ctxt addsimps thms)
val rtac = resolve_tac ctxt
val cnv_ss = ctxt delsimps @{thms CNV_def}
val prove_cnv_tac = SOLVED' (rtac @{thms CNV_prove} THEN' SELECT_GOAL (auto_tac ctxt))
val do_cnv_tac =
(cp_clarsimp_tac cnv_ss) THEN_ALL_NEW
(TRY o REPEAT_ALL_NEW (match_tac ctxt @{thms CNV_split}))
THEN_ALL_NEW (prove_cnv_tac ORELSE' rtac @{thms CNV_I})
val final_simp_tac =
rtac @{thms simp_pre_final_simp}
THEN' cp_clarsimp_tac cnv_ss
THEN' dbg_msg_tac (Sepref_Debugging.msg_subgoal "final_simp_tac: Before CNV_I") ctxt
THEN' rtac @{thms CNV_I}
THEN' dbg_msg_tac (Sepref_Debugging.msg_text "Final-Simp done") ctxt
val simp_tupled_pre_tac =
SELECT_GOAL (Local_Defs.unfold0_tac ctxt @{thms prod_casesK uncurry0_hfref_post})
THEN' REPEAT' (EqSubst.eqsubst_tac ctxt [1] @{thms case_prod_eta})
THEN' rtac @{thms CNV_I}
val unfold_and_tac = rtac @{thms and_pre_beta} THEN_ALL_NEW simp_only @{thms split}
val simp_and1_tac =
rtac @{thms simp_and1} THEN' do_cnv_tac
val simp_and2_tac =
rtac @{thms simp_and2} THEN' do_cnv_tac
val and_plan_tac =
simp_and1_tac
THEN' dbg_msg_tac (Sepref_Debugging.msg_subgoal "State after and1") ctxt
THEN' (
rtac @{thms triv_and1}
ORELSE'
dbg_msg_tac (Sepref_Debugging.msg_subgoal "Invoking and2 on") ctxt
THEN' simp_and2_tac
THEN' dbg_msg_tac (Sepref_Debugging.msg_subgoal "State before final_simp_tac") ctxt
THEN' final_simp_tac
)
val unfold_imp_tac = rtac @{thms imp_pre_beta} THEN_ALL_NEW simp_only @{thms split}
val simp_imp1_tac =
rtac @{thms simp_imp} THEN' do_cnv_tac
val imp_plan_tac = simp_imp1_tac THEN' final_simp_tac
val imp_pre_tac = APPLY_LIST [
simp_only @{thms split_tupled_all}
THEN' Refine_Util.instantiate_tuples_subgoal_tac ctxt
THEN' CASES' [
(unfold_and_tac, ALLGOALS and_plan_tac),
(unfold_imp_tac, ALLGOALS imp_plan_tac)
]
,
simp_tupled_pre_tac
]
val imp_pre_custom_tac =
SELECT_GOAL (Local_Defs.unfold0_tac ctxt @{thms and_pre_def}) THEN'
TRY o SOLVED' (SELECT_GOAL (auto_tac ctxt))
in
CASES' [
(rtac @{thms IMP_PRE_eqI}, imp_pre_tac 1),
(rtac @{thms IMP_PRE_CUSTOMI}, ALLGOALS imp_pre_custom_tac)
]
end
local
fun inf_bn_aux name =
case String.tokens (fn c => c = #".") name of
[] => NONE
| [a] => SOME (Binding.name a)
| (_::a::_) => SOME (Binding.name a)
in
fun infer_basename (Const ("_type_constraint_",_)$t) = infer_basename t
| infer_basename (Const (name,_)) = inf_bn_aux name
| infer_basename (Free (name,_)) = inf_bn_aux name
| infer_basename _ = NONE
end
val cfg_mop = Attrib.setup_config_bool @{binding sepref_register_mop} (K true)
val cfg_ismop = Attrib.setup_config_bool @{binding sepref_register_ismop} (K false)
val cfg_rawgoals = Attrib.setup_config_bool @{binding sepref_register_rawgoals} (K false)
val cfg_transfer = Attrib.setup_config_bool @{binding sepref_decl_impl_transfer} (K true)
val cfg_def = Attrib.setup_config_bool @{binding sepref_register_def} (K true)
val cfg_register = Attrib.setup_config_bool @{binding sepref_decl_impl_register} (K true)
local
open Refine_Util
val flags =
parse_bool_config' "mop" cfg_mop
|| parse_bool_config' "ismop" cfg_ismop
|| parse_bool_config' "rawgoals" cfg_rawgoals
|| parse_bool_config' "def" cfg_def
val parse_flags = parse_paren_list' flags
val parse_name = Scan.option (Parse.binding --| @{keyword ":"})
val parse_relconds = Scan.optional (@{keyword "where"} |-- Parse.and_list1 (Scan.repeat1 Parse.prop) >> flat) []
in
val do_parser = parse_flags -- parse_name -- Parse.term --| @{keyword "::"} -- Parse.term -- parse_relconds
end
fun do_cmd ((((flags,name),opt_raw), relt_raw),relconds_raw) lthy = let
local
val ctxt = Refine_Util.apply_configs flags lthy
in
val flag_ismop = Config.get ctxt cfg_ismop
val flag_mop = Config.get ctxt cfg_mop andalso not flag_ismop
val flag_rawgoals = Config.get ctxt cfg_rawgoals
val flag_def = Config.get ctxt cfg_def
end
open Sepref_Basic Sepref_Rules
val relt = Syntax.parse_term lthy relt_raw
val relconds = map (Syntax.parse_prop lthy) relconds_raw
val _ = dbg_trace lthy "Parse relation and relation conditions together"
val relt = Const (@{const_name "Pure.term"}, dummyT) $ relt
local
val l = Syntax.check_props lthy (relt::relconds)
in
val (relt, relconds) = (hd l, tl l)
end
val relt = Logic.dest_term relt
val opt_pre = Syntax.parse_term lthy opt_raw
val _ = dbg_trace lthy "Infer basename"
val name = case name of
SOME name => name
| NONE => (
case infer_basename opt_pre of
NONE => (error "Could not infer basename: You have to specify a basename"; Binding.empty)
| SOME name => name
)
fun qname s n = Binding.qualify true (Binding.name_of n) (Binding.name s)
fun def name t_pre attribs lthy = let
val t = Syntax.check_term lthy t_pre
val lthy = (snd o Local_Theory.begin_nested) lthy
val ((dt,(_,thm)),lthy) = Local_Theory.define
((name,Mixfix.NoSyn),((Thm.def_binding name,@{attributes [code]}@attribs),t)) lthy;
val (lthy, lthy_old) = `Local_Theory.end_nested lthy
val phi = Proof_Context.export_morphism lthy_old lthy
val thm = Morphism.thm phi thm
val dt = Morphism.term phi dt
in
((dt,thm),lthy)
end
val _ = dbg_trace lthy "Analyze Relation"
val (pre,args,res) = analyze_rel relt
val specified_pre = is_some pre
val pre = the_default (mk_triv_precond args) pre
val def_thms = @{thms PR_CONST_def}
val _ = dbg_trace lthy "Define op"
val op_name = Binding.prefix_name (if flag_ismop then "mop_" else "op_") name
val (def_thms,opc,lthy) =
if flag_def then let
val ((opc,op_def_thm),lthy) = def op_name opt_pre @{attributes [simp]} lthy
val opc = Refine_Util.dummify_tvars opc
val def_thms = op_def_thm::def_thms
in
(def_thms,opc,lthy)
end
else let
val _ = dbg_trace lthy "Refine type of opt_pre to get opc"
val opc = Syntax.check_term lthy opt_pre
val new_ctxt = Variable.declare_term opc lthy
val opc = singleton (Variable.export_terms new_ctxt lthy) opc
|> Refine_Util.dummify_tvars
in
(def_thms,opc,lthy)
end
fun pr_const_heuristics basename c_pre lthy = let
val _ = dbg_trace lthy ("PR_CONST heuristics " ^ @{make_string} c_pre)
val c = Syntax.check_term lthy c_pre
in
case c of
@{mpat "PR_CONST _"} => ((c_pre,false),lthy)
| Const _ => ((c_pre,false),lthy)
| _ => let
val (f,args) = strip_comb c
val lthy = case f of Const _ => let
val ctxt = Variable.declare_term c lthy
val lhs = Autoref_Tagging.list_APP (f,args)
val rhs = @{mk_term "UNPROTECT ?c"}
val goal = Logic.mk_equals (lhs,rhs) |> Thm.cterm_of ctxt
val tac =
Local_Defs.unfold0_tac ctxt @{thms APP_def UNPROTECT_def}
THEN ALLGOALS (simp_tac (put_simpset HOL_basic_ss ctxt))
val thm = Goal.prove_internal ctxt [] goal (K tac)
|> singleton (Variable.export ctxt lthy)
val (_,lthy) = Local_Theory.note
((Binding.suffix_name "_def_pat" basename,@{attributes [def_pat_rules]}),[thm]) lthy
val _ = Thm.pretty_thm lthy thm |> Pretty.string_of |> writeln
in
lthy
end
| _ => (
Pretty.block [
Pretty.str "Complex operation pattern. Added PR_CONST but no pattern rules:",
Pretty.brk 1,Syntax.pretty_term lthy c]
|> Pretty.string_of |> warning
; lthy)
val c_pre = Const(@{const_name PR_CONST},dummyT)$c_pre
in
((c_pre,true),lthy)
end
end
val ((opc,_),lthy) = pr_const_heuristics op_name opc lthy
val arg_intfs = map (get_intf_of_rel lthy) args
val res_intf = get_intf_of_rel lthy res
fun register basename c lthy = let
val _ = dbg_trace lthy "Register"
open Sepref_Basic
val c = Syntax.check_term lthy c
val ri = case (is_nresT (body_type (fastype_of c)), is_nresT res_intf) of
(true,false) => mk_nresT res_intf
| (false,true) => dest_nresT res_intf
| _ => res_intf
val iT = arg_intfs ---> ri
val ((_,itype_thm),lthy) = Sepref_Combinator_Setup.sepref_register_single (Binding.name_of basename) c iT lthy
val _ = Thy_Output.pretty_thm lthy itype_thm |> Pretty.string_of |> writeln
in
lthy
end
val lthy = register op_name opc lthy
val _ = dbg_trace lthy "Define pre"
val pre_name = Binding.prefix_name "pre_" name
val ((prec,pre_def_thm),lthy) = def pre_name pre @{attributes [simp]} lthy
val prec = Refine_Util.dummify_tvars prec
val def_thms = pre_def_thm::def_thms
val pre = constrain_type_pre (fastype_of pre) prec |> Syntax.check_term lthy
val _ = dbg_trace lthy "Convert both, relation and operation to uncurried form, and add nres"
val _ = dbg_trace lthy "Convert relation (arguments have already been separated by analyze-rel)"
val res = case res of @{mpat "⟨_⟩nres_rel"} => res | _ => @{mk_term "⟨?res⟩nres_rel"}
val relt = mk_rel (SOME pre,args,res)
val _ = dbg_trace lthy "Convert operation"
val opcT = fastype_of (Syntax.check_term lthy opc)
val op_is_nres = Sepref_Basic.is_nresT (body_type opcT)
val (opcu, op_ar) = let
val arity = binder_types #> length
val res_ar = arity (Relators.rel_absT res |> not op_is_nres ? dest_nresT)
val op_ar = arity opcT - res_ar
val _ = op_ar = length args orelse
raise TERM("Operation/relation arity mismatch: " ^ string_of_int op_ar ^ " vs " ^ string_of_int (length args),[opc,relt])
val opc =
if op_is_nres then opc
else mk_compN_pre op_ar (Const(@{const_name Refine_Basic.RETURN},dummyT)) opc
val opc = mk_uncurryN_pre op_ar opc
in
(opc, op_ar)
end
val declare_mop = (specified_pre orelse not op_is_nres) andalso flag_mop
val (mop_data,lthy) = if declare_mop then let
val _ = dbg_trace lthy "mop definition"
val mop_rhs = Const(@{const_name mop},dummyT) $ prec $ opcu
|> mk_curryN_pre op_ar
val mop_name = Binding.prefix_name "mop_" name
val ((mopc,mop_def_thm),lthy) = def mop_name mop_rhs [] lthy
val mopc = Refine_Util.dummify_tvars mopc
val ((mopc,added_pr_const),lthy) = pr_const_heuristics mop_name mopc lthy
val mop_def_thm' = if added_pr_const then
mop_def_thm RS @{thm add_PR_CONST_to_def}
else mop_def_thm
val (_,lthy) = Local_Theory.note ((Binding.empty, @{attributes [sepref_mop_def_thms]}),[mop_def_thm']) lthy
val _ = dbg_trace lthy "mop alternative definition"
val alt_unfolds = @{thms mop_alt_unfolds}
|> not specified_pre ? curry op :: pre_def_thm
val mop_alt_thm = Local_Defs.unfold0 lthy alt_unfolds mop_def_thm
|> Refine_Util.shift_lambda_leftN op_ar
val (_,lthy) = Local_Theory.note ((Binding.suffix_name "_alt" mop_name,@{attributes [simp]}),[mop_alt_thm]) lthy
val _ = dbg_trace lthy "mop: register"
val lthy = register mop_name mopc lthy
val _ = dbg_trace lthy "mop: vcg theorem"
local
val Ts = map Relators.rel_absT args
val ctxt = Variable.declare_thm mop_def_thm lthy
val ctxt = fold Variable.declare_typ Ts ctxt
val (x,ctxt) = Refine_Util.fix_left_tuple_from_Ts "x" Ts ctxt
val mop_def_thm = mop_def_thm
|> Local_Defs.unfold0 ctxt @{thms curry_shl}
fun prep_thm thm = (thm OF [mop_def_thm])
|> Drule.infer_instantiate' ctxt [SOME (Thm.cterm_of ctxt x)]
|> Local_Defs.unfold0 ctxt @{thms uncurry_apply uncurry0_apply o_apply}
|> Local_Defs.unfold0 ctxt (def_thms @
@{thms Product_Type.split HOL.True_implies_equals})
|> singleton (Variable.export ctxt lthy)
val thms = map prep_thm @{thms mop_spec_rl_from_def mop_leof_rl_from_def}
in
val (_,lthy) = Local_Theory.note ((qname "vcg" mop_name,@{attributes [refine_vcg]}),thms) lthy
end
in
(SOME (mop_name,mopc,mop_def_thm),lthy)
end
else (NONE,lthy)
val _ = dbg_trace lthy "Build Parametricity Theorem"
val param_t = mk_pair_in_pre opcu opcu relt
|> Syntax.check_term lthy
|> HOLogic.mk_Trueprop
|> curry Logic.list_implies relconds
val _ = dbg_trace lthy "Build Parametricity Theorem for Precondition"
val param_pre_t =
let
val pre_relt = Relators.mk_fun_rel (Relators.list_prodrel_left args) @{term bool_rel}
val param_pre_t = mk_pair_in_pre prec prec pre_relt
|> Syntax.check_term lthy
|> HOLogic.mk_Trueprop
|> curry Logic.list_implies relconds
in
param_pre_t
end
val _ = dbg_trace lthy "Build goals"
val goals = [[ (param_t, []), (param_pre_t, []) ]]
fun after_qed [[p_thm, pp_thm]] _ =
let
val _ = dbg_trace lthy "after_qed"
val p_thm' = p_thm |> not specified_pre ? Local_Defs.unfold0 lthy [pre_def_thm]
val (_,lthy) = Local_Theory.note ((qname "fref" op_name,@{attributes [sepref_fref_thms]}), [p_thm']) lthy
val (_,lthy) = Local_Theory.note ((qname "param" pre_name,@{attributes [param]}), [pp_thm]) lthy
val p'_unfolds = pre_def_thm :: @{thms True_implies_equals}
val (_,lthy) = Local_Theory.note ((qname "fref'" op_name,[]), [Local_Defs.unfold0 lthy p'_unfolds p_thm]) lthy
val lthy = case mop_data of NONE => lthy |
SOME (mop_name,mopc,mop_def_thm) => let
val _ = dbg_trace lthy "Build and prove mop-stuff"
val mopcu = mk_uncurryN_pre op_ar mopc
val param_mop_t = mk_pair_in_pre mopcu mopcu (mk_rel (NONE,args,res))
|> Syntax.check_term lthy
|> HOLogic.mk_Trueprop
|> curry Logic.list_implies relconds
val ctxt = Proof_Context.augment param_mop_t lthy
val tac = let
val p_thm = Local_Defs.unfold0 ctxt @{thms PR_CONST_def} p_thm
in
Local_Defs.unfold0_tac ctxt (mop_def_thm :: @{thms PR_CONST_def uncurry_curry_id uncurry_curry0_id})
THEN FIRSTGOAL (
dbg_msg_tac (Sepref_Debugging.msg_subgoal "Mop-param thm goal after unfolding") ctxt THEN'
resolve_tac ctxt @{thms param_mopI}
THEN' SOLVED' (resolve_tac ctxt [p_thm] THEN_ALL_NEW assume_tac ctxt)
THEN' SOLVED' (resolve_tac ctxt [pp_thm] THEN_ALL_NEW assume_tac ctxt)
)
end
val pm_thm = Goal.prove_internal lthy [] (Thm.cterm_of ctxt param_mop_t) (K tac)
|> singleton (Variable.export ctxt lthy)
val (_,lthy) = Local_Theory.note ((qname "fref" mop_name,@{attributes [sepref_fref_thms]}), [pm_thm]) lthy
val (_,lthy) = Local_Theory.note ((qname "fref'" mop_name,[]), [Local_Defs.unfold0 lthy p'_unfolds pm_thm]) lthy
in
lthy
end
in
lthy
end
| after_qed thmss _ = raise THM ("After-qed: Wrong thmss structure",~1,flat thmss)
fun std_tac ctxt = let
val ptac = REPEAT_ALL_NEW_FWD (Parametricity.net_tac (Parametricity.get_dflt ctxt) ctxt)
val ctxt = ctxt
|> Context_Position.set_visible false
|> Context.proof_map (Thm.attribute_declaration Clasimp.iff_del @{thm pair_in_Id_conv})
in
if flag_rawgoals then
all_tac
else
Local_Defs.unfold0_tac ctxt def_thms THEN ALLGOALS (
TRY o SOLVED' (
TRY o resolve_tac ctxt @{thms frefI}
THEN' TRY o REPEAT_ALL_NEW (ematch_tac ctxt @{thms prod_relE})
THEN' simp_tac (put_simpset HOL_basic_ss ctxt addsimps @{thms split uncurry_apply uncurry0_apply})
THEN' (
SOLVED' (ptac THEN_ALL_NEW asm_full_simp_tac ctxt)
ORELSE' SOLVED' (cp_clarsimp_tac ctxt THEN_ALL_NEW_FWD ptac THEN_ALL_NEW SELECT_GOAL (auto_tac ctxt))
)
)
)
end
val rf_std = Proof.refine (Method.Basic (fn ctxt => SIMPLE_METHOD (std_tac ctxt)))
#> Seq.the_result "do_cmd: Standard proof tactic returned empty result sequence"
in
Proof.theorem NONE after_qed goals lthy
|> rf_std
end
val _ = Outer_Syntax.local_theory_to_proof @{command_keyword "sepref_decl_op"}
"" (do_parser >> do_cmd)
local
fun unfold_PR_CONST_tac ctxt = SELECT_GOAL (Local_Defs.unfold0_tac ctxt @{thms PR_CONST_def})
fun transfer_precond_rl ctxt t R = let
val t' = map_types (K dummyT) t
val goal = Sepref_Basic.mk_pair_in_pre t t' R
|> Syntax.check_term ctxt
|> Thm.cterm_of ctxt
val thm = Drule.infer_instantiate' ctxt [NONE,SOME goal] @{thm IMP_LIST_trivial}
in
thm
end
fun generate_mop_thm ctxt op_thm = let
val orig_ctxt = ctxt
val (op_thm, ctxt) = yield_singleton (apfst snd oo Variable.import true) op_thm ctxt
val mop_def_thms = Named_Theorems_Rev.get ctxt @{named_theorems_rev sepref_mop_def_thms}
|> map (Local_Defs.unfold0 ctxt @{thms curry_shl})
fun fail_hnr_tac _ _ = raise THM("Invalid hnr-theorem",~1,[op_thm])
fun fail_mop_def_tac i st = let
val g = nth (Thm.prems_of st) (i-1)
in
raise TERM("Found no matching mop-definition",[g])
end
val tac = APPLY_LIST [
resolve_tac ctxt [op_thm] ORELSE' fail_hnr_tac,
( resolve_tac ctxt mop_def_thms) ORELSE' fail_mop_def_tac,
simp_precond_tac ctxt ORELSE' Sepref_Debugging.error_tac' "precond simplification failed" ctxt
] 1
val st = @{thm hfref_op_to_mop}
val st = Goal.protect (Thm.nprems_of st) st
val mop_thm = tac st |> Seq.hd |> Goal.conclude
val mop_thm = singleton (Variable.export ctxt orig_ctxt) mop_thm
|> Sepref_Rules.norm_fcomp_rule orig_ctxt
in mop_thm end
fun generate_op_thm ctxt mop_thm = let
val orig_ctxt = ctxt
val (mop_thm, ctxt) = yield_singleton (apfst snd oo Variable.import true) mop_thm ctxt
val mop_def_thms = Named_Theorems_Rev.get ctxt @{named_theorems_rev sepref_mop_def_thms}
|> map (Local_Defs.unfold0 ctxt @{thms curry_shl})
fun fail_hnr_tac _ _ = raise THM("Invalid hnr-theorem",~1,[mop_thm])
fun fail_mop_def_tac i st = let
val g = nth (Thm.prems_of st) (i-1)
in
raise TERM("Found no matching mop-definition",[g])
end
val tac = APPLY_LIST [
resolve_tac ctxt [mop_thm] ORELSE' fail_hnr_tac,
( resolve_tac ctxt mop_def_thms) ORELSE' fail_mop_def_tac,
simp_precond_tac ctxt ORELSE' Sepref_Debugging.error_tac' "precond simplification failed" ctxt
] 1
val st = @{thm hfref_mop_to_op}
val st = Goal.protect (Thm.nprems_of st) st
val op_thm = tac st |> Seq.hd |> Goal.conclude
val op_thm = singleton (Variable.export ctxt orig_ctxt) op_thm
|> Sepref_Rules.norm_fcomp_rule orig_ctxt
in op_thm end
fun chk_result ctxt thm = let
val (_,R,S) = case Thm.concl_of thm of
@{mpat "Trueprop (_∈hfref ?P ?R ?S)"} => (P,R,S)
| _ => raise THM("chk_result: Expected hfref-theorem",~1,[thm])
fun err t = let
val ts = Syntax.pretty_term ctxt t |> Pretty.string_of
in
raise THM ("chk_result: Invalid pattern left in assertions: " ^ ts,~1,[thm])
end
fun check_invalid (t as @{mpat "hr_comp _ _"}) = err t
| check_invalid (t as @{mpat "hrp_comp _ _"}) = err t
| check_invalid (t as @{mpat "pure (the_pure _)"}) = err t
| check_invalid (t as @{mpat "_ O _"}) = err t
| check_invalid _ = false
val _ = exists_subterm check_invalid R
val _ = exists_subterm check_invalid S
in
()
end
fun to_IMP_LIST ctxt thm =
(thm RS @{thm to_IMP_LISTI}) |> Local_Defs.unfold0 ctxt @{thms to_IMP_LIST}
fun from_IMP_LIST ctxt thm = thm |> Local_Defs.unfold0 ctxt @{thms from_IMP_LIST}
in
local
open Refine_Util
val flags =
parse_bool_config' "mop" cfg_mop
|| parse_bool_config' "ismop" cfg_ismop
|| parse_bool_config' "transfer" cfg_transfer
|| parse_bool_config' "rawgoals" cfg_rawgoals
|| parse_bool_config' "register" cfg_register
val parse_flags = parse_paren_list' flags
val parse_precond = Scan.option (@{keyword "["} |-- Parse.term --| @{keyword "]"})
val parse_fref_thm = Scan.option (@{keyword "uses"} |-- Parse.thm)
in
val di_parser = parse_flags -- Scan.optional (Parse.binding --| @{keyword ":"}) Binding.empty -- parse_precond -- Parse.thm -- parse_fref_thm
end
fun di_cmd ((((flags,name), precond_raw), i_thm_raw), p_thm_raw) lthy = let
val i_thm = singleton (Attrib.eval_thms lthy) i_thm_raw
val p_thm = map_option (singleton (Attrib.eval_thms lthy)) p_thm_raw
local
val ctxt = Refine_Util.apply_configs flags lthy
in
val flag_mop = Config.get ctxt cfg_mop
val flag_ismop = Config.get ctxt cfg_ismop
val flag_rawgoals = Config.get ctxt cfg_rawgoals
val flag_transfer = Config.get ctxt cfg_transfer
val flag_register = Config.get ctxt cfg_register
end
val fr_attribs = if flag_register then @{attributes [sepref_fr_rules]} else []
val ctxt = lthy
val _ = dbg_trace lthy "Compose with fref"
local
val hf_tcomp_pre = @{thm hfcomp_tcomp_pre} OF [asm_rl,i_thm]
fun compose p_thm = let
val p_thm = p_thm |> to_assns_rl false lthy
in
hf_tcomp_pre OF [p_thm]
end
in
val thm = case p_thm of
SOME p_thm => compose p_thm
| NONE => let
val p_thms = Named_Theorems_Rev.get ctxt @{named_theorems_rev sepref_fref_thms}
fun err () = let
val prem_s = nth (Thm.prems_of hf_tcomp_pre) 0 |> Syntax.pretty_term ctxt |> Pretty.string_of
in
error ("Found no fref-theorem matching " ^ prem_s)
end
in
case get_first (try compose) p_thms of
NONE => err ()
| SOME thm => thm
end
end
val (thm,ctxt) = yield_singleton (apfst snd oo Variable.import true) thm ctxt
val _ = dbg_trace lthy "Transfer Precond"
val thm = to_IMP_LIST ctxt thm
val thm = thm RS @{thm transform_pre_param}
local
val (pre,R,pp_name,pp_type) = case Thm.prems_of thm of
[@{mpat "Trueprop (IMP_LIST _ ((?pre,_)∈?R))"}, @{mpat "Trueprop (IMP_PRE (mpaq_STRUCT (mpaq_Var ?pp_name ?pp_type)) _)"}] => (pre,R,pp_name,pp_type)
| _ => raise THM("di_cmd: Cannot recognize first prems of transform_pre_param: ", ~1,[thm])
in
val thm = if flag_transfer then thm OF [transfer_precond_rl ctxt pre R] else thm
val thm = case precond_raw of
NONE => thm
| SOME precond_raw => let
val precond = Syntax.parse_term ctxt precond_raw
|> Sepref_Basic.constrain_type_pre pp_type
|> Syntax.check_term ctxt
|> Thm.cterm_of ctxt
val thm = Drule.infer_instantiate ctxt [(pp_name,precond)] thm
val thm = thm OF [asm_rl,@{thm IMP_PRE_CUSTOMD}]
in
thm
end
end
val _ = dbg_trace lthy "Build goals"
val goals = [map (fn x => (x,[])) (Thm.prems_of thm)]
fun after_qed thmss _ = let
val _ = dbg_trace lthy "After QED"
val prems_thms = hd thmss
val thm = thm OF prems_thms
val thm = from_IMP_LIST ctxt thm
val _ = dbg_trace lthy "Cleanup"
val thm = thm
|> cleanup_constraints ctxt
|> Sepref_Rules.norm_fcomp_rule ctxt
|> cleanup_constraints ctxt
|> Sepref_Rules.norm_fcomp_rule ctxt
val thm = thm
|> singleton (Variable.export ctxt lthy)
|> zero_var_indexes
val _ = dbg_trace lthy "Check Result"
val _ = chk_result lthy thm
fun qname suffix = if Binding.is_empty name then name else Binding.suffix_name suffix name
val thm_name = if flag_ismop then qname "_hnr_mop" else qname "_hnr"
val (_,lthy) = Local_Theory.note ((thm_name,fr_attribs),[thm]) lthy
val _ = Thm.pretty_thm lthy thm |> Pretty.string_of |> writeln
val cr_mop_thm = flag_mop andalso not flag_ismop
val lthy =
if cr_mop_thm then
let
val _ = dbg_trace lthy "Create mop-thm"
val mop_thm = thm
|> generate_mop_thm lthy
|> zero_var_indexes
val (_,lthy) = Local_Theory.note ((qname "_hnr_mop",fr_attribs),[mop_thm]) lthy
val _ = Thm.pretty_thm lthy mop_thm |> Pretty.string_of |> writeln
in lthy end
else lthy
val cr_op_thm = flag_ismop
val lthy =
if cr_op_thm then
let
val _ = dbg_trace lthy "Create op-thm"
val op_thm = thm
|> generate_op_thm lthy
|> zero_var_indexes
val (_,lthy) = Local_Theory.note ((qname "_hnr",fr_attribs),[op_thm]) lthy
val _ = Thm.pretty_thm lthy op_thm |> Pretty.string_of |> writeln
in lthy end
else lthy
in
lthy
end
fun std_tac ctxt = let
val ptac = REPEAT_ALL_NEW_FWD (
Parametricity.net_tac (Parametricity.get_dflt ctxt) ctxt ORELSE' assume_tac ctxt
)
in
if flag_rawgoals orelse not flag_transfer then
all_tac
else
APPLY_LIST [
SELECT_GOAL (Local_Defs.unfold0_tac ctxt @{thms from_IMP_LIST}) THEN' TRY o SOLVED' ptac,
simp_precond_tac ctxt
] 1
end
val rf_std = Proof.refine (Method.Basic (fn ctxt => SIMPLE_METHOD (std_tac ctxt)))
#> Seq.the_result "di_cmd: Standard proof tactic returned empty result sequence"
in
Proof.theorem NONE after_qed goals ctxt
|> rf_std
end
val _ = Outer_Syntax.local_theory_to_proof @{command_keyword "sepref_decl_impl"}
"" (di_parser >> di_cmd)
end
end
›
subsection ‹Obsolete Manual Specification Helpers›
lemma vcg_of_RETURN_np:
assumes "f ≡ RETURN r"
shows "SPEC (λx. x=r) ≤ m ⟹ f ≤ m"
and "SPEC (λx. x=r) ≤⇩n m ⟹ f ≤⇩n m"
using assms
by (auto simp: pw_le_iff pw_leof_iff)
lemma vcg_of_RETURN:
assumes "f ≡ do { ASSERT Φ; RETURN r }"
shows "⟦Φ; SPEC (λx. x=r) ≤ m⟧ ⟹ f ≤ m"
and "⟦Φ ⟹ SPEC (λx. x=r) ≤⇩n m⟧ ⟹ f ≤⇩n m"
using assms
by (auto simp: pw_le_iff pw_leof_iff refine_pw_simps)
lemma vcg_of_SPEC:
assumes "f ≡ do { ASSERT pre; SPEC post }"
shows "⟦pre; SPEC post ≤ m⟧ ⟹ f ≤ m"
and "⟦pre ⟹ SPEC post ≤⇩n m⟧ ⟹ f ≤⇩n m"
using assms
by (auto simp: pw_le_iff pw_leof_iff refine_pw_simps)
lemma vcg_of_SPEC_np:
assumes "f ≡ SPEC post"
shows "SPEC post ≤ m ⟹ f ≤ m"
and "SPEC post ≤⇩n m ⟹ f ≤⇩n m"
using assms
by auto
lemma mk_mop_rl1:
assumes "⋀x. mf x ≡ ASSERT (P x) ⪢ RETURN (f x)"
shows "(RETURN o f, mf) ∈ Id → ⟨Id⟩nres_rel"
unfolding assms[abs_def]
by (auto intro!: nres_relI simp: pw_le_iff refine_pw_simps)
lemma mk_mop_rl2:
assumes "⋀x y. mf x y ≡ ASSERT (P x y) ⪢ RETURN (f x y)"
shows "(RETURN oo f, mf) ∈ Id → Id → ⟨Id⟩nres_rel"
unfolding assms[abs_def]
by (auto intro!: nres_relI simp: pw_le_iff refine_pw_simps)
lemma mk_mop_rl3:
assumes "⋀x y z. mf x y z ≡ ASSERT (P x y z) ⪢ RETURN (f x y z)"
shows "(RETURN ooo f, mf) ∈ Id → Id → Id → ⟨Id⟩nres_rel"
unfolding assms[abs_def]
by (auto intro!: nres_relI simp: pw_le_iff refine_pw_simps)
lemma mk_mop_rl0_np:
assumes "mf ≡ RETURN f"
shows "(RETURN f, mf) ∈ ⟨Id⟩nres_rel"
unfolding assms[abs_def]
by (auto intro!: nres_relI simp: pw_le_iff refine_pw_simps)
lemma mk_mop_rl1_np:
assumes "⋀x. mf x ≡ RETURN (f x)"
shows "(RETURN o f, mf) ∈ Id → ⟨Id⟩nres_rel"
unfolding assms[abs_def]
by (auto intro!: nres_relI simp: pw_le_iff refine_pw_simps)
lemma mk_mop_rl2_np:
assumes "⋀x y. mf x y ≡ RETURN (f x y)"
shows "(RETURN oo f, mf) ∈ Id → Id → ⟨Id⟩nres_rel"
unfolding assms[abs_def]
by (auto intro!: nres_relI simp: pw_le_iff refine_pw_simps)
lemma mk_mop_rl3_np:
assumes "⋀x y z. mf x y z ≡ RETURN (f x y z)"
shows "(RETURN ooo f, mf) ∈ Id → Id → Id → ⟨Id⟩nres_rel"
unfolding assms[abs_def]
by (auto intro!: nres_relI simp: pw_le_iff refine_pw_simps)
lemma mk_op_rl0_np:
assumes "mf ≡ RETURN f"
shows "(uncurry0 mf, uncurry0 (RETURN f)) ∈ unit_rel →⇩f ⟨Id⟩nres_rel"
apply (intro frefI nres_relI)
apply (auto simp: assms)
done
lemma mk_op_rl1:
assumes "⋀x. mf x ≡ ASSERT (P x) ⪢ RETURN (f x)"
shows "(mf, RETURN o f) ∈ [P]⇩f Id → ⟨Id⟩nres_rel"
apply (intro frefI nres_relI)
apply (auto simp: assms)
done
lemma mk_op_rl1_np:
assumes "⋀x. mf x ≡ RETURN (f x)"
shows "(mf, (RETURN o f)) ∈ Id →⇩f ⟨Id⟩nres_rel"
apply (intro frefI nres_relI)
apply (auto simp: assms)
done
lemma mk_op_rl2:
assumes "⋀x y. mf x y ≡ ASSERT (P x y) ⪢ RETURN (f x y)"
shows "(uncurry mf, uncurry (RETURN oo f)) ∈ [uncurry P]⇩f Id×⇩rId → ⟨Id⟩nres_rel"
apply (intro frefI nres_relI)
apply (auto simp: assms)
done
lemma mk_op_rl2_np:
assumes "⋀x y. mf x y ≡ RETURN (f x y)"
shows "(uncurry mf, uncurry (RETURN oo f)) ∈ Id×⇩rId →⇩f ⟨Id⟩nres_rel"
apply (intro frefI nres_relI)
apply (auto simp: assms)
done
lemma mk_op_rl3:
assumes "⋀x y z. mf x y z ≡ ASSERT (P x y z) ⪢ RETURN (f x y z)"
shows "(uncurry2 mf, uncurry2 (RETURN ooo f)) ∈ [uncurry2 P]⇩f (Id×⇩rId)×⇩rId → ⟨Id⟩nres_rel"
apply (intro frefI nres_relI)
apply (auto simp: assms)
done
lemma mk_op_rl3_np:
assumes "⋀x y z. mf x y z ≡ RETURN (f x y z)"
shows "(uncurry2 mf, uncurry2 (RETURN ooo f)) ∈ (Id×⇩rId)×⇩rId →⇩f ⟨Id⟩nres_rel"
apply (intro frefI nres_relI)
apply (auto simp: assms)
done
end
Theory Sepref_Tool
section ‹Sepref Tool›
theory Sepref_Tool
imports Sepref_Translate Sepref_Definition Sepref_Combinator_Setup Sepref_Intf_Util
begin
text ‹In this theory, we set up the sepref tool.›
subsection ‹Sepref Method›
lemma CONS_init:
assumes "hn_refine Γ c Γ' R a"
assumes "Γ' ⟹⇩t Γc'"
assumes "⋀a c. hn_ctxt R a c ⟹⇩t hn_ctxt Rc a c"
shows "hn_refine Γ c Γc' Rc a"
apply (rule hn_refine_cons)
apply (rule entt_refl)
apply (rule assms[unfolded hn_ctxt_def])+
done
lemma ID_init: "⟦ID a a' TYPE('T); hn_refine Γ c Γ' R a'⟧
⟹ hn_refine Γ c Γ' R a" by simp
lemma TRANS_init: "⟦ hn_refine Γ c Γ' R a; CNV c c' ⟧
⟹ hn_refine Γ c' Γ' R a"
by simp
lemma infer_post_triv: "P ⟹⇩t P" by (rule entt_refl)
ML ‹
structure Sepref = struct
structure sepref_preproc_simps = Named_Thms (
val name = @{binding sepref_preproc}
val description = "Sepref: Preprocessor simplifications"
)
structure sepref_opt_simps = Named_Thms (
val name = @{binding sepref_opt_simps}
val description = "Sepref: Post-Translation optimizations, phase 1"
)
structure sepref_opt_simps2 = Named_Thms (
val name = @{binding sepref_opt_simps2}
val description = "Sepref: Post-Translation optimizations, phase 2"
)
fun cons_init_tac ctxt = Sepref_Frame.weaken_post_tac ctxt THEN' resolve_tac ctxt @{thms CONS_init}
fun cons_solve_tac dbg ctxt = let
val dbgSOLVED' = if dbg then I else SOLVED'
in
dbgSOLVED' (
resolve_tac ctxt @{thms infer_post_triv}
ORELSE' Sepref_Translate.side_frame_tac ctxt
)
end
fun preproc_tac ctxt = let
val ctxt = put_simpset HOL_basic_ss ctxt
val ctxt = ctxt addsimps (sepref_preproc_simps.get ctxt)
in
Sepref_Rules.prepare_hfref_synth_tac ctxt THEN'
Simplifier.simp_tac ctxt
end
fun id_tac ctxt =
resolve_tac ctxt @{thms ID_init}
THEN' CONVERSION Thm.eta_conversion
THEN' DETERM o Id_Op.id_tac Id_Op.Normal ctxt
fun id_init_tac ctxt =
resolve_tac ctxt @{thms ID_init}
THEN' CONVERSION Thm.eta_conversion
THEN' Id_Op.id_tac Id_Op.Init ctxt
fun id_step_tac ctxt =
Id_Op.id_tac Id_Op.Step ctxt
fun id_solve_tac ctxt =
Id_Op.id_tac Id_Op.Solve ctxt
fun monadify_tac ctxt = Sepref_Monadify.monadify_tac ctxt
fun trans_tac ctxt = Sepref_Translate.trans_tac ctxt
fun opt_tac ctxt = let
val opt1_ss = put_simpset HOL_basic_ss ctxt
addsimps sepref_opt_simps.get ctxt
addsimprocs [@{simproc "HOL.let_simp"}]
|> Simplifier.add_cong @{thm SP_cong}
|> Simplifier.add_cong @{thm PR_CONST_cong}
val unsp_ss = put_simpset HOL_basic_ss ctxt addsimps @{thms SP_def}
val opt2_ss = put_simpset HOL_basic_ss ctxt
addsimps sepref_opt_simps2.get ctxt
addsimprocs [@{simproc "HOL.let_simp"}]
in
simp_tac opt1_ss THEN' simp_tac unsp_ss THEN'
simp_tac opt2_ss THEN' simp_tac unsp_ss THEN'
CONVERSION Thm.eta_conversion THEN'
resolve_tac ctxt @{thms CNV_I}
end
fun sepref_tac dbg ctxt =
(K Sepref_Constraints.ensure_slot_tac)
THEN'
Sepref_Basic.PHASES'
[
("preproc",preproc_tac,0),
("cons_init",cons_init_tac,2),
("id",id_tac,0),
("monadify",monadify_tac false,0),
("opt_init",fn ctxt => resolve_tac ctxt @{thms TRANS_init},1),
("trans",trans_tac,~1),
("opt",opt_tac,~1),
("cons_solve1",cons_solve_tac false,~1),
("cons_solve2",cons_solve_tac false,~1),
("constraints",fn ctxt => K (Sepref_Constraints.solve_constraint_slot ctxt THEN Sepref_Constraints.remove_slot_tac),~1)
] (Sepref_Basic.flag_phases_ctrl dbg) ctxt
val setup = I
#> sepref_preproc_simps.setup
#> sepref_opt_simps.setup
#> sepref_opt_simps2.setup
end
›
setup Sepref.setup
method_setup sepref = ‹Scan.succeed (fn ctxt =>
SIMPLE_METHOD (DETERM (SOLVED' (IF_EXGOAL (
Sepref.sepref_tac false ctxt
)) 1)))›
‹Automatic refinement to Imperative/HOL›
method_setup sepref_dbg_keep = ‹Scan.succeed (fn ctxt => let
in
SIMPLE_METHOD (IF_EXGOAL (Sepref.sepref_tac true ctxt) 1)
end)›
‹Automatic refinement to Imperative/HOL, debug mode›
subsubsection ‹Default Optimizer Setup›
lemma return_bind_eq_let: "do { x←return v; f x } = do { let x=v; f x }" by simp
lemmas [sepref_opt_simps] = return_bind_eq_let bind_return bind_bind id_def
text ‹We allow the synthesized function to contain tagged function applications.
This is important to avoid higher-order unification problems when synthesizing
generic algorithms, for example the to-list algorithm for foreach-loops.›
lemmas [sepref_opt_simps] = Autoref_Tagging.APP_def
text ‹Revert case-pulling done by monadify›
lemma case_prod_return_opt[sepref_opt_simps]:
"case_prod (λa b. return (f a b)) p = return (case_prod f p)"
by (simp split: prod.split)
lemma case_option_return_opt[sepref_opt_simps]:
"case_option (return fn) (λs. return (fs s)) v = return (case_option fn fs v)"
by (simp split: option.split)
lemma case_list_return[sepref_opt_simps]:
"case_list (return fn) (λx xs. return (fc x xs)) l = return (case_list fn fc l)"
by (simp split: list.split)
lemma if_return[sepref_opt_simps]:
"If b (return t) (return e) = return (If b t e)" by simp
text ‹In some cases, pushing in the returns is more convenient›
lemma case_prod_opt2[sepref_opt_simps2]:
"(λx. return (case x of (a,b) ⇒ f a b))
= (λ(a,b). return (f a b))"
by auto
subsection ‹Debugging Methods›
ML ‹
fun SIMPLE_METHOD_NOPARAM' tac = Scan.succeed (fn ctxt => SIMPLE_METHOD' (IF_EXGOAL (tac ctxt)))
fun SIMPLE_METHOD_NOPARAM tac = Scan.succeed (fn ctxt => SIMPLE_METHOD (tac ctxt))
›
method_setup sepref_dbg_preproc = ‹SIMPLE_METHOD_NOPARAM' (fn ctxt => K (Sepref_Constraints.ensure_slot_tac) THEN' Sepref.preproc_tac ctxt)›
‹Sepref debug: Preprocessing phase›
method_setup sepref_dbg_cons_init = ‹SIMPLE_METHOD_NOPARAM' Sepref.cons_init_tac›
‹Sepref debug: Initialize consequence reasoning›
method_setup sepref_dbg_id = ‹SIMPLE_METHOD_NOPARAM' (Sepref.id_tac)›
‹Sepref debug: Identify operations phase›
method_setup sepref_dbg_id_keep = ‹SIMPLE_METHOD_NOPARAM' (Config.put Id_Op.cfg_id_debug true #> Sepref.id_tac)›
‹Sepref debug: Identify operations phase. Debug mode, keep intermediate subgoals on failure.›
method_setup sepref_dbg_monadify = ‹SIMPLE_METHOD_NOPARAM' (Sepref.monadify_tac false)›
‹Sepref debug: Monadify phase›
method_setup sepref_dbg_monadify_keep = ‹SIMPLE_METHOD_NOPARAM' (Sepref.monadify_tac true)›
‹Sepref debug: Monadify phase›
method_setup sepref_dbg_monadify_arity = ‹SIMPLE_METHOD_NOPARAM' (Sepref_Monadify.arity_tac)›
‹Sepref debug: Monadify phase: Arity phase›
method_setup sepref_dbg_monadify_comb = ‹SIMPLE_METHOD_NOPARAM' (Sepref_Monadify.comb_tac)›
‹Sepref debug: Monadify phase: Comb phase›
method_setup sepref_dbg_monadify_check_EVAL = ‹SIMPLE_METHOD_NOPARAM' (K (CONCL_COND' (not o Sepref_Monadify.contains_eval)))›
‹Sepref debug: Monadify phase: check_EVAL phase›
method_setup sepref_dbg_monadify_mark_params = ‹SIMPLE_METHOD_NOPARAM' (Sepref_Monadify.mark_params_tac)›
‹Sepref debug: Monadify phase: mark_params phase›
method_setup sepref_dbg_monadify_dup = ‹SIMPLE_METHOD_NOPARAM' (Sepref_Monadify.dup_tac)›
‹Sepref debug: Monadify phase: dup phase›
method_setup sepref_dbg_monadify_remove_pass = ‹SIMPLE_METHOD_NOPARAM' (Sepref_Monadify.remove_pass_tac)›
‹Sepref debug: Monadify phase: remove_pass phase›
method_setup sepref_dbg_opt_init = ‹SIMPLE_METHOD_NOPARAM' (fn ctxt => resolve_tac ctxt @{thms TRANS_init})›
‹Sepref debug: Translation phase initialization›
method_setup sepref_dbg_trans = ‹SIMPLE_METHOD_NOPARAM' Sepref.trans_tac›
‹Sepref debug: Translation phase›
method_setup sepref_dbg_opt = ‹SIMPLE_METHOD_NOPARAM' (fn ctxt =>
Sepref.opt_tac ctxt
THEN' CONVERSION Thm.eta_conversion
THEN' TRY o resolve_tac ctxt @{thms CNV_I}
)›
‹Sepref debug: Optimization phase›
method_setup sepref_dbg_cons_solve = ‹SIMPLE_METHOD_NOPARAM' (Sepref.cons_solve_tac false)›
‹Sepref debug: Solve post-consequences›
method_setup sepref_dbg_cons_solve_keep = ‹SIMPLE_METHOD_NOPARAM' (Sepref.cons_solve_tac true)›
‹Sepref debug: Solve post-consequences, keep intermediate results›
method_setup sepref_dbg_constraints = ‹SIMPLE_METHOD_NOPARAM' (fn ctxt => IF_EXGOAL (K (
Sepref_Constraints.solve_constraint_slot ctxt
THEN Sepref_Constraints.remove_slot_tac
)))›
‹Sepref debug: Solve accumulated constraints›
method_setup sepref_dbg_id_init = ‹SIMPLE_METHOD_NOPARAM' Sepref.id_init_tac›
‹Sepref debug: Initialize operation identification phase›
method_setup sepref_dbg_id_step = ‹SIMPLE_METHOD_NOPARAM' Sepref.id_step_tac›
‹Sepref debug: Single step operation identification phase›
method_setup sepref_dbg_id_solve = ‹SIMPLE_METHOD_NOPARAM' Sepref.id_solve_tac›
‹Sepref debug: Complete current operation identification goal›
method_setup sepref_dbg_trans_keep = ‹SIMPLE_METHOD_NOPARAM' Sepref_Translate.trans_keep_tac›
‹Sepref debug: Translation phase, stop at failed subgoal›
method_setup sepref_dbg_trans_step = ‹SIMPLE_METHOD_NOPARAM' Sepref_Translate.trans_step_tac›
‹Sepref debug: Translation step›
method_setup sepref_dbg_trans_step_keep = ‹SIMPLE_METHOD_NOPARAM' Sepref_Translate.trans_step_keep_tac›
‹Sepref debug: Translation step, keep unsolved subgoals›
method_setup sepref_dbg_side = ‹SIMPLE_METHOD_NOPARAM' (fn ctxt => REPEAT_ALL_NEW_FWD (Sepref_Translate.side_cond_dispatch_tac false (K no_tac) ctxt))›
method_setup sepref_dbg_side_unfold = ‹SIMPLE_METHOD_NOPARAM' (Sepref_Translate.side_unfold_tac)›
method_setup sepref_dbg_side_keep = ‹SIMPLE_METHOD_NOPARAM' (fn ctxt => REPEAT_ALL_NEW_FWD (Sepref_Translate.side_cond_dispatch_tac true (K no_tac) ctxt))›
method_setup sepref_dbg_prepare_frame = ‹SIMPLE_METHOD_NOPARAM' Sepref_Frame.prepare_frame_tac›
‹Sepref debug: Prepare frame inference›
method_setup sepref_dbg_frame = ‹SIMPLE_METHOD_NOPARAM' (Sepref_Frame.frame_tac (Sepref_Translate.side_fallback_tac))›
‹Sepref debug: Frame inference›
method_setup sepref_dbg_merge = ‹SIMPLE_METHOD_NOPARAM' (Sepref_Frame.merge_tac (Sepref_Translate.side_fallback_tac))›
‹Sepref debug: Frame inference, merge›
method_setup sepref_dbg_frame_step = ‹SIMPLE_METHOD_NOPARAM' (Sepref_Frame.frame_step_tac (Sepref_Translate.side_fallback_tac) false)›
‹Sepref debug: Frame inference, single-step›
method_setup sepref_dbg_frame_step_keep = ‹SIMPLE_METHOD_NOPARAM' (Sepref_Frame.frame_step_tac (Sepref_Translate.side_fallback_tac) true)›
‹Sepref debug: Frame inference, single-step, keep partially solved side conditions›
subsection ‹Utilities›
subsubsection ‹Manual hfref-proofs›
method_setup sepref_to_hnr = ‹SIMPLE_METHOD_NOPARAM' (fn ctxt =>
Sepref.preproc_tac ctxt THEN' Sepref_Frame.weaken_post_tac ctxt)›
‹Sepref: Convert to hnr-goal and weaken postcondition›
method_setup sepref_to_hoare = ‹
let
fun sepref_to_hoare_tac ctxt = let
val ss = put_simpset HOL_basic_ss ctxt
addsimps @{thms hn_ctxt_def pure_def}
in
Sepref.preproc_tac ctxt
THEN' Sepref_Frame.weaken_post_tac ctxt
THEN' resolve_tac ctxt @{thms hn_refineI}
THEN' asm_full_simp_tac ss
end
in
SIMPLE_METHOD_NOPARAM' sepref_to_hoare_tac
end
› ‹Sepref: Convert to hoare-triple›
subsubsection ‹Copying of Parameters›
lemma fold_COPY: "x = COPY x" by simp
sepref_register COPY
text ‹Copy is treated as normal operator, and one can just declare rules for it! ›
lemma hnr_pure_COPY[sepref_fr_rules]:
"CONSTRAINT is_pure R ⟹ (return, RETURN o COPY) ∈ R⇧k →⇩a R"
by (sep_auto simp: is_pure_conv pure_def intro!: hfrefI hn_refineI)
subsubsection ‹Short-Circuit Boolean Evaluation›
text ‹Convert boolean operators to short-circuiting.
When applied before monadify, this will generate a short-circuit execution.›
lemma short_circuit_conv:
"(a ∧ b) ⟷ (if a then b else False)"
"(a ∨ b) ⟷ (if a then True else b)"
"(a⟶b) ⟷ (if a then b else True)"
by auto
subsubsection ‹Eliminating higher-order›
lemma ho_prod_move[sepref_preproc]: "case_prod (λa b x. f x a b) = (λp x. case_prod (f x) p)"
by (auto intro!: ext)
declare o_apply[sepref_preproc]
subsubsection ‹Precision Proofs›
text ‹
We provide a method that tries to extract equalities from
an assumption of the form
‹_ ⊨ P1 * … * Pn ∧⇩A P1' * … * Pn'›,
if it find a precision rule for Pi and Pi'.
The precision rules are extracted from the constraint rules.
TODO: Extracting the precision rules from the constraint rules
is not a clean solution. It might be better to collect precision rules
separately, and feed them into the constraint solver.
›
definition "prec_spec h Γ Γ' ≡ h ⊨ Γ * true ∧⇩A Γ' * true"
lemma prec_specI: "h ⊨ Γ ∧⇩A Γ' ⟹ prec_spec h Γ Γ'"
unfolding prec_spec_def
by (auto simp: mod_and_dist mod_star_trueI)
lemma prec_split1_aux: "A*B*true ⟹⇩A A*true"
apply (fr_rot 2, fr_rot_rhs 1)
apply (rule ent_star_mono)
by simp_all
lemma prec_split2_aux: "A*B*true ⟹⇩A B*true"
apply (fr_rot 1, fr_rot_rhs 1)
apply (rule ent_star_mono)
by simp_all
lemma prec_spec_splitE:
assumes "prec_spec h (A*B) (C*D)"
obtains "prec_spec h A C" "prec_spec h B D"
apply (thin_tac "⟦_;_⟧ ⟹ _")
apply (rule that)
using assms
apply -
unfolding prec_spec_def
apply (erule entailsD[rotated])
apply (rule ent_conjI)
apply (rule ent_conjE1)
apply (rule prec_split1_aux)
apply (rule ent_conjE2)
apply (rule prec_split1_aux)
apply (erule entailsD[rotated])
apply (rule ent_conjI)
apply (rule ent_conjE1)
apply (rule prec_split2_aux)
apply (rule ent_conjE2)
apply (rule prec_split2_aux)
done
lemma prec_specD:
assumes "precise R"
assumes "prec_spec h (R a p) (R a' p)"
shows "a=a'"
using assms unfolding precise_def prec_spec_def CONSTRAINT_def by blast
ML ‹
fun prec_extract_eqs_tac ctxt = let
fun is_precise thm = case Thm.concl_of thm of
@{mpat "Trueprop (precise _)"} => true
| _ => false
val thms = Sepref_Constraints.get_constraint_rules ctxt
@ Sepref_Constraints.get_safe_constraint_rules ctxt
val thms = thms
|> filter is_precise
val thms = @{thms snga_prec sngr_prec} @ thms
val thms = map (fn thm => thm RS @{thm prec_specD}) thms
val thin_prec_spec_rls = @{thms thin_rl[Pure.of "prec_spec a b c" for a b c]}
val tac =
forward_tac ctxt @{thms prec_specI}
THEN' REPEAT_ALL_NEW (ematch_tac ctxt @{thms prec_spec_splitE})
THEN' REPEAT o (dresolve_tac ctxt thms)
THEN' REPEAT o (eresolve_tac ctxt thin_prec_spec_rls )
in tac end
›
method_setup prec_extract_eqs = ‹SIMPLE_METHOD_NOPARAM' prec_extract_eqs_tac›
‹Extract equalities from "_ |= _ & _" assumption, using precision rules›
subsubsection ‹Combinator Rules›
lemma split_merge: "⟦A ∨⇩A B ⟹⇩t X; X ∨⇩A C ⟹⇩t D⟧ ⟹ (A ∨⇩A B ∨⇩A C ⟹⇩t D)"
proof -
assume a1: "X ∨⇩A C ⟹⇩t D"
assume "A ∨⇩A B ⟹⇩t X"
then have "A ∨⇩A B ⟹⇩A D * true"
using a1 by (meson ent_disjI1_direct ent_frame_fwd enttD entt_def_true)
then show ?thesis
using a1 by (metis (no_types) Assertions.ent_disjI2 ent_disjE enttD enttI semigroup.assoc sup.semigroup_axioms)
qed
ML ‹
fun prep_comb_rule thm = let
fun mrg t = case Logic.strip_assums_concl t of
@{mpat "Trueprop (_ ∨⇩A _ ∨⇩A _ ⟹⇩t _)"} => (@{thm split_merge},true)
| @{mpat "Trueprop (hn_refine _ _ ?G _ _)"} => (
if not (is_Var (head_of G)) then (@{thm hn_refine_cons_post}, true)
else (asm_rl,false)
)
| _ => (asm_rl,false)
val inst = Thm.prems_of thm |> map mrg
in
if exists snd inst then
prep_comb_rule (thm OF (map fst inst))
else
thm |> zero_var_indexes
end
›
attribute_setup sepref_prep_comb_rule = ‹Scan.succeed (Thm.rule_attribute [] (K prep_comb_rule))›
‹Preprocess combinator rule: Split merge-rules and add missing frame rules›
end
Theory Sepref_Chapter_Setup
chapter ‹Basic Setup›
text ‹This chapter contains the basic setup of the Sepref tool.›
theory Sepref_Chapter_Setup
imports Main
begin
end
Theory Sepref_HOL_Bindings
section ‹HOL Setup›
theory Sepref_HOL_Bindings
imports Sepref_Tool
begin
subsection ‹Assertion Annotation›
text ‹Annotate an assertion to a term. The term must then be refined with this assertion.›
definition ASSN_ANNOT :: "('a ⇒ 'ai ⇒ assn) ⇒ 'a ⇒ 'a" where [simp]: "ASSN_ANNOT A x ≡ x"
context fixes A :: "'a ⇒ 'ai ⇒ assn" begin
sepref_register "PR_CONST (ASSN_ANNOT A)"
lemma [def_pat_rules]: "ASSN_ANNOT$A ≡ UNPROTECT (ASSN_ANNOT A)" by simp
lemma [sepref_fr_rules]: "(return o (λx. x), RETURN o PR_CONST (ASSN_ANNOT A)) ∈ A⇧d→⇩aA"
by sepref_to_hoare sep_auto
end
lemma annotate_assn: "x ≡ ASSN_ANNOT A x" by simp
subsection ‹Shortcuts›
abbreviation "nat_assn ≡ (id_assn::nat ⇒ _)"
abbreviation "int_assn ≡ (id_assn::int ⇒ _)"
abbreviation "bool_assn ≡ (id_assn::bool ⇒ _)"
subsection ‹Identity Relations›
definition "IS_ID R ≡ R=Id"
definition "IS_BELOW_ID R ≡ R⊆Id"
lemma [safe_constraint_rules]:
"IS_ID Id"
"IS_ID R1 ⟹ IS_ID R2 ⟹ IS_ID (R1 → R2)"
"IS_ID R ⟹ IS_ID (⟨R⟩option_rel)"
"IS_ID R ⟹ IS_ID (⟨R⟩list_rel)"
"IS_ID R1 ⟹ IS_ID R2 ⟹ IS_ID (R1 ×⇩r R2)"
"IS_ID R1 ⟹ IS_ID R2 ⟹ IS_ID (⟨R1,R2⟩sum_rel)"
by (auto simp: IS_ID_def)
lemma [safe_constraint_rules]:
"IS_BELOW_ID Id"
"IS_BELOW_ID R ⟹ IS_BELOW_ID (⟨R⟩option_rel)"
"IS_BELOW_ID R1 ⟹ IS_BELOW_ID R2 ⟹ IS_BELOW_ID (R1 ×⇩r R2)"
"IS_BELOW_ID R1 ⟹ IS_BELOW_ID R2 ⟹ IS_BELOW_ID (⟨R1,R2⟩sum_rel)"
by (auto simp: IS_ID_def IS_BELOW_ID_def option_rel_def sum_rel_def list_rel_def)
lemma IS_BELOW_ID_fun_rel_aux: "R1⊇Id ⟹ IS_BELOW_ID R2 ⟹ IS_BELOW_ID (R1 → R2)"
by (auto simp: IS_BELOW_ID_def dest: fun_relD)
corollary IS_BELOW_ID_fun_rel[safe_constraint_rules]:
"IS_ID R1 ⟹ IS_BELOW_ID R2 ⟹ IS_BELOW_ID (R1 → R2)"
using IS_BELOW_ID_fun_rel_aux[of Id R2]
by (auto simp: IS_ID_def)
lemma IS_BELOW_ID_list_rel[safe_constraint_rules]:
"IS_BELOW_ID R ⟹ IS_BELOW_ID (⟨R⟩list_rel)"
unfolding IS_BELOW_ID_def
proof safe
fix l l'
assume A: "R⊆Id"
assume "(l,l')∈⟨R⟩list_rel"
thus "l=l'"
apply induction
using A by auto
qed
lemma IS_ID_imp_BELOW_ID[constraint_rules]:
"IS_ID R ⟹ IS_BELOW_ID R"
by (auto simp: IS_ID_def IS_BELOW_ID_def )
subsection ‹Inverse Relation›
lemma inv_fun_rel_eq[simp]: "(A→B)¯ = A¯→B¯"
by (auto dest: fun_relD)
lemma inv_option_rel_eq[simp]: "(⟨K⟩option_rel)¯ = ⟨K¯⟩option_rel"
by (auto simp: option_rel_def)
lemma inv_prod_rel_eq[simp]: "(P ×⇩r Q)¯ = P¯ ×⇩r Q¯"
by (auto)
lemma inv_sum_rel_eq[simp]: "(⟨P,Q⟩sum_rel)¯ = ⟨P¯,Q¯⟩sum_rel"
by (auto simp: sum_rel_def)
lemma inv_list_rel_eq[simp]: "(⟨R⟩list_rel)¯ = ⟨R¯⟩list_rel"
unfolding list_rel_def
apply safe
apply (subst list.rel_flip[symmetric])
apply (simp add: conversep_iff[abs_def])
apply (subst list.rel_flip[symmetric])
apply (simp add: conversep_iff[abs_def])
done
lemmas [constraint_simps] =
Relation.converse_Id
inv_fun_rel_eq
inv_option_rel_eq
inv_prod_rel_eq
inv_sum_rel_eq
inv_list_rel_eq
subsection ‹Single Valued and Total Relations›
definition "IS_LEFT_UNIQUE R ≡ single_valued (R¯)"
definition "IS_LEFT_TOTAL R ≡ Domain R = UNIV"
definition "IS_RIGHT_TOTAL R ≡ Range R = UNIV"
abbreviation (input) "IS_RIGHT_UNIQUE ≡ single_valued"
lemmas IS_RIGHT_UNIQUED = single_valuedD
lemma IS_LEFT_UNIQUED: "⟦IS_LEFT_UNIQUE r; (y, x) ∈ r; (z, x) ∈ r⟧ ⟹ y = z"
by (auto simp: IS_LEFT_UNIQUE_def dest: single_valuedD)
lemma prop2p:
"IS_LEFT_UNIQUE R = left_unique (rel2p R)"
"IS_RIGHT_UNIQUE R = right_unique (rel2p R)"
"right_unique (rel2p (R¯)) = left_unique (rel2p R)"
"IS_LEFT_TOTAL R = left_total (rel2p R)"
"IS_RIGHT_TOTAL R = right_total (rel2p R)"
by (auto
simp: IS_LEFT_UNIQUE_def left_unique_def single_valued_def
simp: right_unique_def
simp: IS_LEFT_TOTAL_def left_total_def
simp: IS_RIGHT_TOTAL_def right_total_def
simp: rel2p_def
)
lemma p2prop:
"left_unique P = IS_LEFT_UNIQUE (p2rel P)"
"right_unique P = IS_RIGHT_UNIQUE (p2rel P)"
"left_total P = IS_LEFT_TOTAL (p2rel P)"
"right_total P = IS_RIGHT_TOTAL (p2rel P)"
"bi_unique P ⟷ left_unique P ∧ right_unique P"
by (auto
simp: IS_LEFT_UNIQUE_def left_unique_def single_valued_def
simp: right_unique_def bi_unique_alt_def
simp: IS_LEFT_TOTAL_def left_total_def
simp: IS_RIGHT_TOTAL_def right_total_def
simp: p2rel_def
)
lemmas [safe_constraint_rules] =
single_valued_Id
prod_rel_sv
list_rel_sv
option_rel_sv
sum_rel_sv
lemma [safe_constraint_rules]:
"IS_LEFT_UNIQUE Id"
"IS_LEFT_UNIQUE R1 ⟹ IS_LEFT_UNIQUE R2 ⟹ IS_LEFT_UNIQUE (R1×⇩rR2)"
"IS_LEFT_UNIQUE R1 ⟹ IS_LEFT_UNIQUE R2 ⟹ IS_LEFT_UNIQUE (⟨R1,R2⟩sum_rel)"
"IS_LEFT_UNIQUE R ⟹ IS_LEFT_UNIQUE (⟨R⟩option_rel)"
"IS_LEFT_UNIQUE R ⟹ IS_LEFT_UNIQUE (⟨R⟩list_rel)"
by (auto simp: IS_LEFT_UNIQUE_def prod_rel_sv sum_rel_sv option_rel_sv list_rel_sv)
lemma IS_LEFT_TOTAL_alt: "IS_LEFT_TOTAL R ⟷ (∀x. ∃y. (x,y)∈R)"
by (auto simp: IS_LEFT_TOTAL_def)
lemma IS_RIGHT_TOTAL_alt: "IS_RIGHT_TOTAL R ⟷ (∀x. ∃y. (y,x)∈R)"
by (auto simp: IS_RIGHT_TOTAL_def)
lemma [safe_constraint_rules]:
"IS_LEFT_TOTAL Id"
"IS_LEFT_TOTAL R1 ⟹ IS_LEFT_TOTAL R2 ⟹ IS_LEFT_TOTAL (R1×⇩rR2)"
"IS_LEFT_TOTAL R1 ⟹ IS_LEFT_TOTAL R2 ⟹ IS_LEFT_TOTAL (⟨R1,R2⟩sum_rel)"
"IS_LEFT_TOTAL R ⟹ IS_LEFT_TOTAL (⟨R⟩option_rel)"
apply (auto simp: IS_LEFT_TOTAL_alt sum_rel_def option_rel_def list_rel_def)
apply (rename_tac x; case_tac x; auto)
apply (rename_tac x; case_tac x; auto)
done
lemma [safe_constraint_rules]: "IS_LEFT_TOTAL R ⟹ IS_LEFT_TOTAL (⟨R⟩list_rel)"
unfolding IS_LEFT_TOTAL_alt
proof safe
assume A: "∀x.∃y. (x,y)∈R"
fix l
show "∃l'. (l,l')∈⟨R⟩list_rel"
apply (induction l)
using A
by (auto simp: list_rel_split_right_iff)
qed
lemma [safe_constraint_rules]:
"IS_RIGHT_TOTAL Id"
"IS_RIGHT_TOTAL R1 ⟹ IS_RIGHT_TOTAL R2 ⟹ IS_RIGHT_TOTAL (R1×⇩rR2)"
"IS_RIGHT_TOTAL R1 ⟹ IS_RIGHT_TOTAL R2 ⟹ IS_RIGHT_TOTAL (⟨R1,R2⟩sum_rel)"
"IS_RIGHT_TOTAL R ⟹ IS_RIGHT_TOTAL (⟨R⟩option_rel)"
apply (auto simp: IS_RIGHT_TOTAL_alt sum_rel_def option_rel_def) []
apply (auto simp: IS_RIGHT_TOTAL_alt sum_rel_def option_rel_def) []
apply (auto simp: IS_RIGHT_TOTAL_alt sum_rel_def option_rel_def) []
apply (rename_tac x; case_tac x; auto)
apply (clarsimp simp: IS_RIGHT_TOTAL_alt option_rel_def)
apply (rename_tac x; case_tac x; auto)
done
lemma [safe_constraint_rules]: "IS_RIGHT_TOTAL R ⟹ IS_RIGHT_TOTAL (⟨R⟩list_rel)"
unfolding IS_RIGHT_TOTAL_alt
proof safe
assume A: "∀x.∃y. (y,x)∈R"
fix l
show "∃l'. (l',l)∈⟨R⟩list_rel"
apply (induction l)
using A
by (auto simp: list_rel_split_left_iff)
qed
lemma [constraint_simps]:
"IS_LEFT_TOTAL (R¯) ⟷ IS_RIGHT_TOTAL R "
"IS_RIGHT_TOTAL (R¯) ⟷ IS_LEFT_TOTAL R "
"IS_LEFT_UNIQUE (R¯) ⟷ IS_RIGHT_UNIQUE R"
"IS_RIGHT_UNIQUE (R¯) ⟷ IS_LEFT_UNIQUE R "
by (auto simp: IS_RIGHT_TOTAL_alt IS_LEFT_TOTAL_alt IS_LEFT_UNIQUE_def)
lemma [safe_constraint_rules]:
"IS_RIGHT_UNIQUE A ⟹ IS_RIGHT_TOTAL B ⟹ IS_RIGHT_TOTAL (A→B)"
"IS_RIGHT_TOTAL A ⟹ IS_RIGHT_UNIQUE B ⟹ IS_RIGHT_UNIQUE (A→B)"
"IS_LEFT_UNIQUE A ⟹ IS_LEFT_TOTAL B ⟹ IS_LEFT_TOTAL (A→B)"
"IS_LEFT_TOTAL A ⟹ IS_LEFT_UNIQUE B ⟹ IS_LEFT_UNIQUE (A→B)"
apply (simp_all add: prop2p rel2p)
apply (blast intro!: transfer_raw)+
done
lemma [constraint_rules]:
"IS_BELOW_ID R ⟹ IS_RIGHT_UNIQUE R"
"IS_BELOW_ID R ⟹ IS_LEFT_UNIQUE R"
"IS_ID R ⟹ IS_RIGHT_TOTAL R"
"IS_ID R ⟹ IS_LEFT_TOTAL R"
by (auto simp: IS_BELOW_ID_def IS_ID_def IS_LEFT_UNIQUE_def IS_RIGHT_TOTAL_def IS_LEFT_TOTAL_def
intro: single_valuedI)
thm constraint_rules
subsubsection ‹Additional Parametricity Lemmas›
lemma param_distinct[param]: "⟦IS_LEFT_UNIQUE A; IS_RIGHT_UNIQUE A⟧ ⟹ (distinct, distinct) ∈ ⟨A⟩list_rel → bool_rel"
apply (fold rel2p_def)
apply (simp add: rel2p)
apply (rule distinct_transfer)
apply (simp add: p2prop)
done
lemma param_Image[param]:
assumes "IS_LEFT_UNIQUE A" "IS_RIGHT_UNIQUE A"
shows "((``), (``)) ∈ ⟨A×⇩rB⟩set_rel → ⟨A⟩set_rel → ⟨B⟩set_rel"
apply (clarsimp simp: set_rel_def; intro conjI)
apply (fastforce dest: IS_RIGHT_UNIQUED[OF assms(2)])
apply (fastforce dest: IS_LEFT_UNIQUED[OF assms(1)])
done
lemma pres_eq_iff_svb: "((=),(=))∈K→K→bool_rel ⟷ (single_valued K ∧ single_valued (K¯))"
apply (safe intro!: single_valuedI)
apply (metis (full_types) IdD fun_relD1)
apply (metis (full_types) IdD fun_relD1)
by (auto dest: single_valuedD)
definition "IS_PRES_EQ R ≡ ((=), (=))∈R→R→bool_rel"
lemma [constraint_rules]: "⟦single_valued R; single_valued (R¯)⟧ ⟹ IS_PRES_EQ R"
by (simp add: pres_eq_iff_svb IS_PRES_EQ_def)
subsection ‹Bounded Assertions›
definition "b_rel R P ≡ R ∩ UNIV×Collect P"
definition "b_assn A P ≡ λx y. A x y * ↑(P x)"
lemma b_assn_pure_conv[constraint_simps]: "b_assn (pure R) P = pure (b_rel R P)"
by (auto intro!: ext simp: b_rel_def b_assn_def pure_def)
lemmas [sepref_import_rewrite, sepref_frame_normrel_eqs, fcomp_norm_unfold]
= b_assn_pure_conv[symmetric]
lemma b_rel_nesting[simp]:
"b_rel (b_rel R P1) P2 = b_rel R (λx. P1 x ∧ P2 x)"
by (auto simp: b_rel_def)
lemma b_rel_triv[simp]:
"b_rel R (λ_. True) = R"
by (auto simp: b_rel_def)
lemma b_assn_nesting[simp]:
"b_assn (b_assn A P1) P2 = b_assn A (λx. P1 x ∧ P2 x)"
by (auto simp: b_assn_def pure_def intro!: ext)
lemma b_assn_triv[simp]:
"b_assn A (λ_. True) = A"
by (auto simp: b_assn_def pure_def intro!: ext)
lemmas [simp,constraint_simps,sepref_import_rewrite, sepref_frame_normrel_eqs, fcomp_norm_unfold]
= b_rel_nesting b_assn_nesting
lemma b_rel_simp[simp]: "(x,y)∈b_rel R P ⟷ (x,y)∈R ∧ P y"
by (auto simp: b_rel_def)
lemma b_assn_simp[simp]: "b_assn A P x y = A x y * ↑(P x)"
by (auto simp: b_assn_def)
lemma b_rel_Range[simp]: "Range (b_rel R P) = Range R ∩ Collect P" by auto
lemma b_assn_rdom[simp]: "rdomp (b_assn R P) x ⟷ rdomp R x ∧ P x"
by (auto simp: rdomp_def)
lemma b_rel_below_id[constraint_rules]:
"IS_BELOW_ID R ⟹ IS_BELOW_ID (b_rel R P)"
by (auto simp: IS_BELOW_ID_def)
lemma b_rel_left_unique[constraint_rules]:
"IS_LEFT_UNIQUE R ⟹ IS_LEFT_UNIQUE (b_rel R P)"
by (auto simp: IS_LEFT_UNIQUE_def single_valued_def)
lemma b_rel_right_unique[constraint_rules]:
"IS_RIGHT_UNIQUE R ⟹ IS_RIGHT_UNIQUE (b_rel R P)"
by (auto simp: single_valued_def)
lemma b_assn_is_pure[safe_constraint_rules]:
"is_pure A ⟹ is_pure (b_assn A P)"
by (auto simp: is_pure_conv b_assn_pure_conv)
lemma b_assn_subtyping_match[sepref_frame_match_rules]:
assumes "hn_ctxt (b_assn A P) x y ⟹⇩t hn_ctxt A' x y"
assumes "⟦vassn_tag (hn_ctxt A x y); vassn_tag (hn_ctxt A' x y); P x⟧ ⟹ P' x"
shows "hn_ctxt (b_assn A P) x y ⟹⇩t hn_ctxt (b_assn A' P') x y"
using assms
unfolding hn_ctxt_def b_assn_def entailst_def entails_def
by (fastforce simp: vassn_tag_def mod_star_conv)
lemma b_assn_subtyping_match_eqA[sepref_frame_match_rules]:
assumes "⟦vassn_tag (hn_ctxt A x y); P x⟧ ⟹ P' x"
shows "hn_ctxt (b_assn A P) x y ⟹⇩t hn_ctxt (b_assn A P') x y"
apply (rule b_assn_subtyping_match)
subgoal
unfolding hn_ctxt_def b_assn_def entailst_def entails_def
by (fastforce simp: vassn_tag_def mod_star_conv)
subgoal
using assms .
done
lemma b_assn_subtyping_match_tR[sepref_frame_match_rules]:
assumes "⟦P x⟧ ⟹ hn_ctxt A x y ⟹⇩t hn_ctxt A' x y"
shows "hn_ctxt (b_assn A P) x y ⟹⇩t hn_ctxt A' x y"
using assms
unfolding hn_ctxt_def b_assn_def entailst_def entails_def
by (fastforce simp: vassn_tag_def mod_star_conv)
lemma b_assn_subtyping_match_tL[sepref_frame_match_rules]:
assumes "hn_ctxt A x y ⟹⇩t hn_ctxt A' x y"
assumes "⟦vassn_tag (hn_ctxt A x y)⟧ ⟹ P' x"
shows "hn_ctxt A x y ⟹⇩t hn_ctxt (b_assn A' P') x y"
using assms
unfolding hn_ctxt_def b_assn_def entailst_def entails_def
by (fastforce simp: vassn_tag_def mod_star_conv)
lemma b_assn_subtyping_match_eqA_tR[sepref_frame_match_rules]:
"hn_ctxt (b_assn A P) x y ⟹⇩t hn_ctxt A x y"
unfolding hn_ctxt_def b_assn_def
by (sep_auto intro!: enttI)
lemma b_assn_subtyping_match_eqA_tL[sepref_frame_match_rules]:
assumes "⟦vassn_tag (hn_ctxt A x y)⟧ ⟹ P' x"
shows "hn_ctxt A x y ⟹⇩t hn_ctxt (b_assn A P') x y"
using assms
unfolding hn_ctxt_def b_assn_def entailst_def entails_def
by (fastforce simp: vassn_tag_def mod_star_conv)
lemma b_rel_subtyping_merge[sepref_frame_merge_rules]:
assumes "hn_ctxt A x y ∨⇩A hn_ctxt A' x y ⟹⇩t hn_ctxt Am x y"
shows "hn_ctxt (b_assn A P) x y ∨⇩A hn_ctxt (b_assn A' P') x y ⟹⇩t hn_ctxt (b_assn Am (λx. P x ∨ P' x)) x y"
using assms
unfolding hn_ctxt_def b_assn_def entailst_def entails_def
by (fastforce simp: vassn_tag_def)
lemma b_rel_subtyping_merge_eqA[sepref_frame_merge_rules]:
shows "hn_ctxt (b_assn A P) x y ∨⇩A hn_ctxt (b_assn A P') x y ⟹⇩t hn_ctxt (b_assn A (λx. P x ∨ P' x)) x y"
apply (rule b_rel_subtyping_merge)
by simp
lemma b_rel_subtyping_merge_tL[sepref_frame_merge_rules]:
assumes "hn_ctxt A x y ∨⇩A hn_ctxt A' x y ⟹⇩t hn_ctxt Am x y"
shows "hn_ctxt A x y ∨⇩A hn_ctxt (b_assn A' P') x y ⟹⇩t hn_ctxt Am x y"
using b_rel_subtyping_merge[of A x y A' Am "λ_. True" P', simplified] assms .
lemma b_rel_subtyping_merge_tR[sepref_frame_merge_rules]:
assumes "hn_ctxt A x y ∨⇩A hn_ctxt A' x y ⟹⇩t hn_ctxt Am x y"
shows "hn_ctxt (b_assn A P) x y ∨⇩A hn_ctxt A' x y ⟹⇩t hn_ctxt Am x y"
using b_rel_subtyping_merge[of A x y A' Am P "λ_. True", simplified] assms .
lemma b_rel_subtyping_merge_eqA_tL[sepref_frame_merge_rules]:
shows "hn_ctxt A x y ∨⇩A hn_ctxt (b_assn A P') x y ⟹⇩t hn_ctxt A x y"
using b_rel_subtyping_merge_eqA[of A "λ_. True" x y P', simplified] .
lemma b_rel_subtyping_merge_eqA_tR[sepref_frame_merge_rules]:
shows "hn_ctxt (b_assn A P) x y ∨⇩A hn_ctxt A x y ⟹⇩t hn_ctxt A x y"
using b_rel_subtyping_merge_eqA[of A P x y "λ_. True", simplified] .
lemma b_assn_invalid_merge1: "hn_invalid (b_assn A P) x y ∨⇩A hn_invalid (b_assn A P') x y
⟹⇩t hn_invalid (b_assn A (λx. P x ∨ P' x)) x y"
by (sep_auto simp: hn_ctxt_def invalid_assn_def entailst_def)
lemma b_assn_invalid_merge2: "hn_invalid (b_assn A P) x y ∨⇩A hn_invalid A x y
⟹⇩t hn_invalid A x y"
by (sep_auto simp: hn_ctxt_def invalid_assn_def entailst_def)
lemma b_assn_invalid_merge3: "hn_invalid A x y ∨⇩A hn_invalid (b_assn A P) x y
⟹⇩t hn_invalid A x y"
by (sep_auto simp: hn_ctxt_def invalid_assn_def entailst_def)
lemma b_assn_invalid_merge4: "hn_invalid (b_assn A P) x y ∨⇩A hn_ctxt (b_assn A P') x y
⟹⇩t hn_invalid (b_assn A (λx. P x ∨ P' x)) x y"
by (sep_auto simp: hn_ctxt_def invalid_assn_def entailst_def)
lemma b_assn_invalid_merge5: "hn_ctxt (b_assn A P') x y ∨⇩A hn_invalid (b_assn A P) x y
⟹⇩t hn_invalid (b_assn A (λx. P x ∨ P' x)) x y"
by (sep_auto simp: hn_ctxt_def invalid_assn_def entailst_def)
lemma b_assn_invalid_merge6: "hn_invalid (b_assn A P) x y ∨⇩A hn_ctxt A x y
⟹⇩t hn_invalid A x y"
by (sep_auto simp: hn_ctxt_def invalid_assn_def entailst_def)
lemma b_assn_invalid_merge7: "hn_ctxt A x y ∨⇩A hn_invalid (b_assn A P) x y
⟹⇩t hn_invalid A x y"
by (sep_auto simp: hn_ctxt_def invalid_assn_def entailst_def)
lemma b_assn_invalid_merge8: "hn_ctxt (b_assn A P) x y ∨⇩A hn_invalid A x y
⟹⇩t hn_invalid A x y"
by (sep_auto simp: hn_ctxt_def invalid_assn_def entailst_def)
lemma b_assn_invalid_merge9: "hn_invalid A x y ∨⇩A hn_ctxt (b_assn A P) x y
⟹⇩t hn_invalid A x y"
by (sep_auto simp: hn_ctxt_def invalid_assn_def entailst_def)
lemmas b_assn_invalid_merge[sepref_frame_merge_rules] =
b_assn_invalid_merge1
b_assn_invalid_merge2
b_assn_invalid_merge3
b_assn_invalid_merge4
b_assn_invalid_merge5
b_assn_invalid_merge6
b_assn_invalid_merge7
b_assn_invalid_merge8
b_assn_invalid_merge9
abbreviation nbn_rel :: "nat ⇒ (nat × nat) set"
where "nbn_rel n ≡ b_rel nat_rel (λx::nat. x<n)"
abbreviation nbn_assn :: "nat ⇒ nat ⇒ nat ⇒ assn"
where "nbn_assn n ≡ b_assn nat_assn (λx::nat. x<n)"
subsection ‹Tool Setup›
lemmas [sepref_relprops] =
sepref_relpropI[of IS_LEFT_UNIQUE]
sepref_relpropI[of IS_RIGHT_UNIQUE]
sepref_relpropI[of IS_LEFT_TOTAL]
sepref_relpropI[of IS_RIGHT_TOTAL]
sepref_relpropI[of is_pure]
sepref_relpropI[of "IS_PURE Φ" for Φ]
sepref_relpropI[of IS_ID]
sepref_relpropI[of IS_BELOW_ID]
lemma [sepref_relprops_simps]:
"CONSTRAINT (IS_PURE IS_ID) A ⟹ CONSTRAINT (IS_PURE IS_BELOW_ID) A"
"CONSTRAINT (IS_PURE IS_ID) A ⟹ CONSTRAINT (IS_PURE IS_LEFT_TOTAL) A"
"CONSTRAINT (IS_PURE IS_ID) A ⟹ CONSTRAINT (IS_PURE IS_RIGHT_TOTAL) A"
"CONSTRAINT (IS_PURE IS_BELOW_ID) A ⟹ CONSTRAINT (IS_PURE IS_LEFT_UNIQUE) A"
"CONSTRAINT (IS_PURE IS_BELOW_ID) A ⟹ CONSTRAINT (IS_PURE IS_RIGHT_UNIQUE) A"
by (auto
simp: IS_ID_def IS_BELOW_ID_def IS_PURE_def IS_LEFT_UNIQUE_def
simp: IS_LEFT_TOTAL_def IS_RIGHT_TOTAL_def
simp: single_valued_below_Id)
declare True_implies_equals[sepref_relprops_simps]
lemma [sepref_relprops_transform]: "single_valued (R¯) = IS_LEFT_UNIQUE R"
by (auto simp: IS_LEFT_UNIQUE_def)
subsection ‹HOL Combinators›
lemma hn_if[sepref_comb_rules]:
assumes P: "Γ ⟹⇩t Γ1 * hn_val bool_rel a a'"
assumes RT: "a ⟹ hn_refine (Γ1 * hn_val bool_rel a a') b' Γ2b R b"
assumes RE: "¬a ⟹ hn_refine (Γ1 * hn_val bool_rel a a') c' Γ2c R c"
assumes IMP: "TERM If ⟹ Γ2b ∨⇩A Γ2c ⟹⇩t Γ'"
shows "hn_refine Γ (if a' then b' else c') Γ' R (If$a$b$c)"
using P RT RE IMP[OF TERMI]
unfolding APP_def PROTECT2_def
by (rule hnr_If)
lemmas [sepref_opt_simps] = if_True if_False
lemma hn_let[sepref_comb_rules]:
assumes P: "Γ ⟹⇩t Γ1 * hn_ctxt R v v'"
assumes R: "⋀x x'. x=v ⟹ hn_refine (Γ1 * hn_ctxt R x x') (f' x')
(Γ' x x') R2 (f x)"
assumes F: "⋀x x'. Γ' x x' ⟹⇩t Γ2 * hn_ctxt R' x x'"
shows
"hn_refine Γ (Let v' f') (Γ2 * hn_ctxt R' v v') R2 (Let$v$(λ⇩2x. f x))"
apply (rule hn_refine_cons[OF P _ F entt_refl])
apply (simp)
apply (rule R)
by simp
subsection ‹Basic HOL types›
lemma hnr_default[sepref_import_param]: "(default,default)∈Id" by simp
lemma unit_hnr[sepref_import_param]: "((),())∈unit_rel" by auto
lemmas [sepref_import_param] =
param_bool
param_nat1
param_int
lemmas [id_rules] =
itypeI[Pure.of 0 "TYPE (nat)"]
itypeI[Pure.of 0 "TYPE (int)"]
itypeI[Pure.of 1 "TYPE (nat)"]
itypeI[Pure.of 1 "TYPE (int)"]
itypeI[Pure.of numeral "TYPE (num ⇒ nat)"]
itypeI[Pure.of numeral "TYPE (num ⇒ int)"]
itype_self[of num.One]
itype_self[of num.Bit0]
itype_self[of num.Bit1]
lemma param_min_nat[param,sepref_import_param]: "(min,min)∈nat_rel → nat_rel → nat_rel" by auto
lemma param_max_nat[param,sepref_import_param]: "(max,max)∈nat_rel → nat_rel → nat_rel" by auto
lemma param_min_int[param,sepref_import_param]: "(min,min)∈int_rel → int_rel → int_rel" by auto
lemma param_max_int[param,sepref_import_param]: "(max,max)∈int_rel → int_rel → int_rel" by auto
lemma uminus_hnr[sepref_import_param]: "(uminus,uminus)∈int_rel → int_rel" by auto
lemma nat_param[param,sepref_import_param]: "(nat,nat) ∈ int_rel → nat_rel" by auto
lemma int_param[param,sepref_import_param]: "(int,int) ∈ nat_rel → int_rel" by auto
subsection "Product"
lemmas [sepref_import_rewrite, sepref_frame_normrel_eqs, fcomp_norm_unfold] = prod_assn_pure_conv[symmetric]
lemma prod_assn_precise[constraint_rules]:
"precise P1 ⟹ precise P2 ⟹ precise (prod_assn P1 P2)"
apply rule
apply (clarsimp simp: prod_assn_def star_assoc)
apply safe
apply (erule (1) prec_frame) apply frame_inference+
apply (erule (1) prec_frame) apply frame_inference+
done
lemma
"precise P1 ⟹ precise P2 ⟹ precise (prod_assn P1 P2)"
apply rule
apply (clarsimp simp: prod_assn_def)
proof (rule conjI)
fix F F' h as a b a' b' ap bp
assume P1: "precise P1" and P2: "precise P2"
assume F: "(h, as) ⊨ P1 a ap * P2 b bp * F ∧⇩A P1 a' ap * P2 b' bp * F'"
from F have "(h, as) ⊨ P1 a ap * (P2 b bp * F) ∧⇩A P1 a' ap * (P2 b' bp * F')"
by (simp only: mult.assoc)
with preciseD[OF P1] show "a=a'" .
from F have "(h, as) ⊨ P2 b bp * (P1 a ap * F) ∧⇩A P2 b' bp * (P1 a' ap * F')"
by (simp only: mult.assoc[where 'a=assn] mult.commute[where 'a=assn] mult.left_commute[where 'a=assn])
with preciseD[OF P2] show "b=b'" .
qed
lemma intf_of_prod_assn[intf_of_assn]:
assumes "intf_of_assn A TYPE('a)" "intf_of_assn B TYPE('b)"
shows "intf_of_assn (prod_assn A B) TYPE('a * 'b)"
by simp
lemma pure_prod[constraint_rules]:
assumes P1: "is_pure P1" and P2: "is_pure P2"
shows "is_pure (prod_assn P1 P2)"
proof -
from P1 obtain P1' where P1': "⋀x x'. P1 x x' = ↑(P1' x x')"
using is_pureE by blast
from P2 obtain P2' where P2': "⋀x x'. P2 x x' = ↑(P2' x x')"
using is_pureE by blast
show ?thesis proof
fix x x'
show "prod_assn P1 P2 x x' =
↑ (case (x, x') of ((a1, a2), c1, c2) ⇒ P1' a1 c1 ∧ P2' a2 c2)"
unfolding prod_assn_def
apply (simp add: P1' P2' split: prod.split)
done
qed
qed
lemma prod_frame_match[sepref_frame_match_rules]:
assumes "hn_ctxt A (fst x) (fst y) ⟹⇩t hn_ctxt A' (fst x) (fst y)"
assumes "hn_ctxt B (snd x) (snd y) ⟹⇩t hn_ctxt B' (snd x) (snd y)"
shows "hn_ctxt (prod_assn A B) x y ⟹⇩t hn_ctxt (prod_assn A' B') x y"
apply (cases x; cases y; simp)
apply (simp add: hn_ctxt_def)
apply (rule entt_star_mono)
using assms apply (auto simp: hn_ctxt_def)
done
lemma prod_frame_merge[sepref_frame_merge_rules]:
assumes "hn_ctxt A (fst x) (fst y) ∨⇩A hn_ctxt A' (fst x) (fst y) ⟹⇩t hn_ctxt Am (fst x) (fst y)"
assumes "hn_ctxt B (snd x) (snd y) ∨⇩A hn_ctxt B' (snd x) (snd y) ⟹⇩t hn_ctxt Bm (snd x) (snd y)"
shows "hn_ctxt (prod_assn A B) x y ∨⇩A hn_ctxt (prod_assn A' B') x y ⟹⇩t hn_ctxt (prod_assn Am Bm) x y"
by (blast intro: entt_disjE prod_frame_match
entt_disjD1[OF assms(1)] entt_disjD2[OF assms(1)]
entt_disjD1[OF assms(2)] entt_disjD2[OF assms(2)])
lemma entt_invalid_prod: "hn_invalid (prod_assn A B) p p' ⟹⇩t hn_ctxt (prod_assn (invalid_assn A) (invalid_assn B)) p p'"
apply (simp add: hn_ctxt_def invalid_assn_def[abs_def])
apply (rule enttI)
apply clarsimp
apply (cases p; cases p'; auto simp: mod_star_conv pure_def)
done
lemmas invalid_prod_merge[sepref_frame_merge_rules] = gen_merge_cons[OF entt_invalid_prod]
lemma prod_assn_ctxt: "prod_assn A1 A2 x y = z ⟹ hn_ctxt (prod_assn A1 A2) x y = z"
by (simp add: hn_ctxt_def)
lemma hn_case_prod'[sepref_prep_comb_rule,sepref_comb_rules]:
assumes FR: "Γ⟹⇩thn_ctxt (prod_assn P1 P2) p' p * Γ1"
assumes Pair: "⋀a1 a2 a1' a2'. ⟦p'=(a1',a2')⟧
⟹ hn_refine (hn_ctxt P1 a1' a1 * hn_ctxt P2 a2' a2 * Γ1 * hn_invalid (prod_assn P1 P2) p' p) (f a1 a2)
(hn_ctxt P1' a1' a1 * hn_ctxt P2' a2' a2 * hn_ctxt XX1 p' p * Γ1') R (f' a1' a2')"
shows "hn_refine Γ (case_prod f p) (hn_ctxt (prod_assn P1' P2') p' p * Γ1')
R (case_prod$(λ⇩2a b. f' a b)$p')" (is "?G Γ")
apply1 (rule hn_refine_cons_pre[OF FR])
apply1 extract_hnr_invalids
apply1 (cases p; cases p'; simp add: prod_assn_pair_conv[THEN prod_assn_ctxt])
apply (rule hn_refine_cons[OF _ Pair _ entt_refl])
applyS (simp add: hn_ctxt_def)
applyS simp
applyS (simp add: hn_ctxt_def entt_fr_refl entt_fr_drop)
done
lemma hn_case_prod_old:
assumes P: "Γ⟹⇩tΓ1 * hn_ctxt (prod_assn P1 P2) p' p"
assumes R: "⋀a1 a2 a1' a2'. ⟦p'=(a1',a2')⟧
⟹ hn_refine (Γ1 * hn_ctxt P1 a1' a1 * hn_ctxt P2 a2' a2 * hn_invalid (prod_assn P1 P2) p' p) (f a1 a2)
(Γh a1 a1' a2 a2') R (f' a1' a2')"
assumes M: "⋀a1 a1' a2 a2'. Γh a1 a1' a2 a2'
⟹⇩t Γ' * hn_ctxt P1' a1' a1 * hn_ctxt P2' a2' a2 * hn_ctxt Pxx p' p"
shows "hn_refine Γ (case_prod f p) (Γ' * hn_ctxt (prod_assn P1' P2') p' p)
R (case_prod$(λ⇩2a b. f' a b)$p')"
apply1 (cases p; cases p'; simp)
apply1 (rule hn_refine_cons_pre[OF P])
apply (rule hn_refine_preI)
apply (simp add: hn_ctxt_def assn_aci)
apply (rule hn_refine_cons[OF _ R])
apply1 (rule enttI)
applyS (sep_auto simp add: hn_ctxt_def invalid_assn_def mod_star_conv)
applyS simp
apply1 (rule entt_trans[OF M])
applyS (sep_auto intro!: enttI simp: hn_ctxt_def)
applyS simp
done
lemma hn_Pair[sepref_fr_rules]: "hn_refine
(hn_ctxt P1 x1 x1' * hn_ctxt P2 x2 x2')
(return (x1',x2'))
(hn_invalid P1 x1 x1' * hn_invalid P2 x2 x2')
(prod_assn P1 P2)
(RETURN$(Pair$x1$x2))"
unfolding hn_refine_def
apply (sep_auto simp: hn_ctxt_def prod_assn_def)
apply (rule ent_frame_fwd[OF invalidate_clone'[of P1]], frame_inference)
apply (rule ent_frame_fwd[OF invalidate_clone'[of P2]], frame_inference)
apply sep_auto
done
lemma fst_hnr[sepref_fr_rules]: "(return o fst,RETURN o fst) ∈ (prod_assn A B)⇧d →⇩a A"
by sepref_to_hoare sep_auto
lemma snd_hnr[sepref_fr_rules]: "(return o snd,RETURN o snd) ∈ (prod_assn A B)⇧d →⇩a B"
by sepref_to_hoare sep_auto
lemmas [constraint_simps] = prod_assn_pure_conv
lemmas [sepref_import_param] = param_prod_swap
lemma rdomp_prodD[dest!]: "rdomp (prod_assn A B) (a,b) ⟹ rdomp A a ∧ rdomp B b"
unfolding rdomp_def prod_assn_def
by (sep_auto simp: mod_star_conv)
subsection "Option"
fun option_assn :: "('a ⇒ 'c ⇒ assn) ⇒ 'a option ⇒ 'c option ⇒ assn" where
"option_assn P None None = emp"
| "option_assn P (Some a) (Some c) = P a c"
| "option_assn _ _ _ = false"
lemma option_assn_simps[simp]:
"option_assn P None v' = ↑(v'=None)"
"option_assn P v None = ↑(v=None)"
apply (cases v', simp_all)
apply (cases v, simp_all)
done
lemma option_assn_alt_def: "option_assn R a b =
(case (a,b) of (Some x, Some y) ⇒ R x y
| (None,None) ⇒ emp
| _ ⇒ false)"
by (auto split: option.split)
lemma option_assn_pure_conv[constraint_simps]: "option_assn (pure R) = pure (⟨R⟩option_rel)"
apply (intro ext)
apply (rename_tac a c)
apply (case_tac "(pure R,a,c)" rule: option_assn.cases)
by (auto simp: pure_def)
lemmas [sepref_import_rewrite, sepref_frame_normrel_eqs, fcomp_norm_unfold] = option_assn_pure_conv[symmetric]
lemma hr_comp_option_conv[simp, fcomp_norm_unfold]: "
hr_comp (option_assn R) (⟨R'⟩option_rel)
= option_assn (hr_comp R R')"
unfolding hr_comp_def[abs_def]
apply (intro ext ent_iffI)
apply solve_entails
apply (case_tac "(R,b,c)" rule: option_assn.cases)
apply clarsimp_all
apply (sep_auto simp: option_assn_alt_def split: option.splits)
apply (clarsimp simp: option_assn_alt_def split: option.splits; safe)
apply (sep_auto split: option.splits)
apply (intro ent_ex_preI)
apply (rule ent_ex_postI)
apply (sep_auto split: option.splits)
done
lemma option_assn_precise[safe_constraint_rules]:
assumes "precise P"
shows "precise (option_assn P)"
proof
fix a a' p h F F'
assume A: "h ⊨ option_assn P a p * F ∧⇩A option_assn P a' p * F'"
thus "a=a'" proof (cases "(P,a,p)" rule: option_assn.cases)
case (2 _ av pv) hence [simp]: "a=Some av" "p=Some pv" by simp_all
from A obtain av' where [simp]: "a'=Some av'" by (cases a', simp_all)
from A have "h ⊨ P av pv * F ∧⇩A P av' pv * F'" by simp
with ‹precise P› have "av=av'" by (rule preciseD)
thus ?thesis by simp
qed simp_all
qed
lemma pure_option[safe_constraint_rules]:
assumes P: "is_pure P"
shows "is_pure (option_assn P)"
proof -
from P obtain P' where P': "⋀x x'. P x x' = ↑(P' x x')"
using is_pureE by blast
show ?thesis proof
fix x x'
show "option_assn P x x' =
↑ (case (x, x') of
(None,None) ⇒ True | (Some v, Some v') ⇒ P' v v' | _ ⇒ False
)"
apply (simp add: P' split: prod.split option.split)
done
qed
qed
lemma hn_ctxt_option: "option_assn A x y = z ⟹ hn_ctxt (option_assn A) x y = z"
by (simp add: hn_ctxt_def)
lemma hn_case_option[sepref_prep_comb_rule, sepref_comb_rules]:
fixes p p' P
defines [simp]: "INVE ≡ hn_invalid (option_assn P) p p'"
assumes FR: "Γ ⟹⇩t hn_ctxt (option_assn P) p p' * F"
assumes Rn: "p=None ⟹ hn_refine (hn_ctxt (option_assn P) p p' * F) f1' (hn_ctxt XX1 p p' * Γ1') R f1"
assumes Rs: "⋀x x'. ⟦ p=Some x; p'=Some x' ⟧ ⟹
hn_refine (hn_ctxt P x x' * INVE * F) (f2' x') (hn_ctxt P' x x' * hn_ctxt XX2 p p' * Γ2') R (f2 x)"
assumes MERGE1: "Γ1' ∨⇩A Γ2' ⟹⇩t Γ'"
shows "hn_refine Γ (case_option f1' f2' p') (hn_ctxt (option_assn P') p p' * Γ') R (case_option$f1$(λ⇩2x. f2 x)$p)"
apply (rule hn_refine_cons_pre[OF FR])
apply1 extract_hnr_invalids
apply (cases p; cases p'; simp add: option_assn.simps[THEN hn_ctxt_option])
subgoal
apply (rule hn_refine_cons[OF _ Rn _ entt_refl]; assumption?)
applyS (simp add: hn_ctxt_def)
apply (subst mult.commute, rule entt_fr_drop)
apply (rule entt_trans[OF _ MERGE1])
apply (simp add: ent_disjI1' ent_disjI2')
done
subgoal
apply (rule hn_refine_cons[OF _ Rs _ entt_refl]; assumption?)
applyS (simp add: hn_ctxt_def)
apply (rule entt_star_mono)
apply1 (rule entt_fr_drop)
applyS (simp add: hn_ctxt_def)
apply1 (rule entt_trans[OF _ MERGE1])
applyS (simp add: hn_ctxt_def)
done
done
lemma hn_None[sepref_fr_rules]:
"hn_refine emp (return None) emp (option_assn P) (RETURN$None)"
by rule sep_auto
lemma hn_Some[sepref_fr_rules]: "hn_refine
(hn_ctxt P v v')
(return (Some v'))
(hn_invalid P v v')
(option_assn P)
(RETURN$(Some$v))"
by rule (sep_auto simp: hn_ctxt_def invalidate_clone')
definition "imp_option_eq eq a b ≡ case (a,b) of
(None,None) ⇒ return True
| (Some a, Some b) ⇒ eq a b
| _ ⇒ return False"
lemma option_assn_eq[sepref_comb_rules]:
fixes a b :: "'a option"
assumes F1: "Γ ⟹⇩t hn_ctxt (option_assn P) a a' * hn_ctxt (option_assn P) b b' * Γ1"
assumes EQ: "⋀va va' vb vb'. hn_refine
(hn_ctxt P va va' * hn_ctxt P vb vb' * Γ1)
(eq' va' vb')
(Γ' va va' vb vb')
bool_assn
(RETURN$((=) $va$vb))"
assumes F2:
"⋀va va' vb vb'.
Γ' va va' vb vb' ⟹⇩t hn_ctxt P va va' * hn_ctxt P vb vb' * Γ1"
shows "hn_refine
Γ
(imp_option_eq eq' a' b')
(hn_ctxt (option_assn P) a a' * hn_ctxt (option_assn P) b b' * Γ1)
bool_assn
(RETURN$((=) $a$b))"
apply (rule hn_refine_cons_pre[OF F1])
unfolding imp_option_eq_def
apply rule
apply (simp split: option.split add: hn_ctxt_def, intro impI conjI)
apply (sep_auto split: option.split simp: hn_ctxt_def pure_def)
apply (cases a, (sep_auto split: option.split simp: hn_ctxt_def pure_def)+)[]
apply (cases a, (sep_auto split: option.split simp: hn_ctxt_def pure_def)+)[]
apply (cases b, (sep_auto split: option.split simp: hn_ctxt_def pure_def)+)[]
apply (rule cons_post_rule)
apply (rule hn_refineD[OF EQ[unfolded hn_ctxt_def]])
apply simp
apply (rule ent_frame_fwd[OF F2[THEN enttD,unfolded hn_ctxt_def]])
apply (fr_rot 2)
apply (fr_rot_rhs 1)
apply (rule fr_refl)
apply (rule ent_refl)
apply (sep_auto simp: pure_def)
done
lemma [pat_rules]:
"(=) $a$None ≡ is_None$a"
"(=) $None$a ≡ is_None$a"
apply (rule eq_reflection, simp split: option.split)+
done
lemma hn_is_None[sepref_fr_rules]: "hn_refine
(hn_ctxt (option_assn P) a a')
(return (is_None a'))
(hn_ctxt (option_assn P) a a')
bool_assn
(RETURN$(is_None$a))"
apply rule
apply (sep_auto split: option.split simp: hn_ctxt_def pure_def)
done
lemma (in -) sepref_the_complete[sepref_fr_rules]:
assumes "x≠None"
shows "hn_refine
(hn_ctxt (option_assn R) x xi)
(return (the xi))
(hn_invalid (option_assn R) x xi)
(R)
(RETURN$(the$x))"
using assms
apply (cases x)
apply simp
apply (cases xi)
apply (simp add: hn_ctxt_def)
apply rule
apply (sep_auto simp: hn_ctxt_def invalidate_clone' vassn_tagI invalid_assn_const)
done
lemma (in -) sepref_the_id:
assumes "CONSTRAINT (IS_PURE IS_ID) R"
shows "hn_refine
(hn_ctxt (option_assn R) x xi)
(return (the xi))
(hn_ctxt (option_assn R) x xi)
(R)
(RETURN$(the$x))"
using assms
apply (clarsimp simp: IS_PURE_def IS_ID_def hn_ctxt_def is_pure_conv)
apply (cases x)
apply simp
apply (cases xi)
apply (simp add: hn_ctxt_def invalid_assn_def)
apply rule apply (sep_auto simp: pure_def)
apply rule apply (sep_auto)
apply (simp add: option_assn_pure_conv)
apply rule apply (sep_auto simp: pure_def invalid_assn_def)
done
subsection "Lists"
fun list_assn :: "('a ⇒ 'c ⇒ assn) ⇒ 'a list ⇒ 'c list ⇒ assn" where
"list_assn P [] [] = emp"
| "list_assn P (a#as) (c#cs) = P a c * list_assn P as cs"
| "list_assn _ _ _ = false"
lemma list_assn_aux_simps[simp]:
"list_assn P [] l' = (↑(l'=[]))"
"list_assn P l [] = (↑(l=[]))"
unfolding hn_ctxt_def
apply (cases l')
apply simp
apply simp
apply (cases l)
apply simp
apply simp
done
lemma list_assn_aux_append[simp]:
"length l1=length l1' ⟹
list_assn P (l1@l2) (l1'@l2')
= list_assn P l1 l1' * list_assn P l2 l2'"
apply (induct rule: list_induct2)
apply simp
apply (simp add: star_assoc)
done
lemma list_assn_aux_ineq_len: "length l ≠ length li ⟹ list_assn A l li = false"
proof (induction l arbitrary: li)
case (Cons x l li) thus ?case by (cases li; auto)
qed simp
lemma list_assn_aux_append2[simp]:
assumes "length l2=length l2'"
shows "list_assn P (l1@l2) (l1'@l2')
= list_assn P l1 l1' * list_assn P l2 l2'"
apply (cases "length l1 = length l1'")
apply (erule list_assn_aux_append)
apply (simp add: list_assn_aux_ineq_len assms)
done
lemma list_assn_pure_conv[constraint_simps]: "list_assn (pure R) = pure (⟨R⟩list_rel)"
proof (intro ext)
fix l li
show "list_assn (pure R) l li = pure (⟨R⟩list_rel) l li"
apply (induction "pure R" l li rule: list_assn.induct)
by (auto simp: pure_def)
qed
lemmas [sepref_import_rewrite, sepref_frame_normrel_eqs, fcomp_norm_unfold] = list_assn_pure_conv[symmetric]
lemma list_assn_simps[simp]:
"hn_ctxt (list_assn P) [] l' = (↑(l'=[]))"
"hn_ctxt (list_assn P) l [] = (↑(l=[]))"
"hn_ctxt (list_assn P) [] [] = emp"
"hn_ctxt (list_assn P) (a#as) (c#cs) = hn_ctxt P a c * hn_ctxt (list_assn P) as cs"
"hn_ctxt (list_assn P) (a#as) [] = false"
"hn_ctxt (list_assn P) [] (c#cs) = false"
unfolding hn_ctxt_def
apply (cases l')
apply simp
apply simp
apply (cases l)
apply simp
apply simp
apply simp_all
done
lemma list_assn_precise[constraint_rules]: "precise P ⟹ precise (list_assn P)"
proof
fix l1 l2 l h F1 F2
assume P: "precise P"
assume "h⊨list_assn P l1 l * F1 ∧⇩A list_assn P l2 l * F2"
thus "l1=l2"
proof (induct l arbitrary: l1 l2 F1 F2)
case Nil thus ?case by simp
next
case (Cons a ls)
from Cons obtain a1 ls1 where [simp]: "l1=a1#ls1"
by (cases l1, simp)
from Cons obtain a2 ls2 where [simp]: "l2=a2#ls2"
by (cases l2, simp)
from Cons.prems have M:
"h ⊨ P a1 a * list_assn P ls1 ls * F1
∧⇩A P a2 a * list_assn P ls2 ls * F2" by simp
have "a1=a2"
apply (rule preciseD[OF P, where a=a1 and a'=a2 and p=a
and F= "list_assn P ls1 ls * F1"
and F'="list_assn P ls2 ls * F2"
])
using M
by (simp add: star_assoc)
moreover have "ls1=ls2"
apply (rule Cons.hyps[where ?F1.0="P a1 a * F1" and ?F2.0="P a2 a * F2"])
using M
by (simp only: star_aci)
ultimately show ?case by simp
qed
qed
lemma list_assn_pure[constraint_rules]:
assumes P: "is_pure P"
shows "is_pure (list_assn P)"
proof -
from P obtain P' where P_eq: "⋀x x'. P x x' = ↑(P' x x')"
by (rule is_pureE) blast
{
fix l l'
have "list_assn P l l' = ↑(list_all2 P' l l')"
by (induct P≡P l l' rule: list_assn.induct)
(simp_all add: P_eq)
} thus ?thesis by rule
qed
lemma list_assn_mono:
"⟦⋀x x'. P x x'⟹⇩AP' x x'⟧ ⟹ list_assn P l l' ⟹⇩A list_assn P' l l'"
unfolding hn_ctxt_def
apply (induct P l l' rule: list_assn.induct)
by (auto intro: ent_star_mono)
lemma list_assn_monot:
"⟦⋀x x'. P x x'⟹⇩tP' x x'⟧ ⟹ list_assn P l l' ⟹⇩t list_assn P' l l'"
unfolding hn_ctxt_def
apply (induct P l l' rule: list_assn.induct)
by (auto intro: entt_star_mono)
lemma list_match_cong[sepref_frame_match_rules]:
"⟦⋀x x'. ⟦x∈set l; x'∈set l'⟧ ⟹ hn_ctxt A x x' ⟹⇩t hn_ctxt A' x x' ⟧ ⟹ hn_ctxt (list_assn A) l l' ⟹⇩t hn_ctxt (list_assn A') l l'"
unfolding hn_ctxt_def
by (induct A l l' rule: list_assn.induct) (simp_all add: entt_star_mono)
lemma list_merge_cong[sepref_frame_merge_rules]:
assumes "⋀x x'. ⟦x∈set l; x'∈set l'⟧ ⟹ hn_ctxt A x x' ∨⇩A hn_ctxt A' x x' ⟹⇩t hn_ctxt Am x x'"
shows "hn_ctxt (list_assn A) l l' ∨⇩A hn_ctxt (list_assn A') l l' ⟹⇩t hn_ctxt (list_assn Am) l l'"
apply (blast intro: entt_disjE list_match_cong entt_disjD1[OF assms] entt_disjD2[OF assms])
done
lemma invalid_list_split:
"invalid_assn (list_assn A) (x#xs) (y#ys) ⟹⇩t invalid_assn A x y * invalid_assn (list_assn A) xs ys"
by (fastforce simp: invalid_assn_def intro!: enttI simp: mod_star_conv)
lemma entt_invalid_list: "hn_invalid (list_assn A) l l' ⟹⇩t hn_ctxt (list_assn (invalid_assn A)) l l'"
apply (induct A l l' rule: list_assn.induct)
applyS simp
subgoal
apply1 (simp add: hn_ctxt_def cong del: invalid_assn_cong)
apply1 (rule entt_trans[OF invalid_list_split])
apply (rule entt_star_mono)
applyS simp
apply (rule entt_trans)
applyS assumption
applyS simp
done
applyS (simp add: hn_ctxt_def invalid_assn_def)
applyS (simp add: hn_ctxt_def invalid_assn_def)
done
lemmas invalid_list_merge[sepref_frame_merge_rules] = gen_merge_cons[OF entt_invalid_list]
lemma list_assn_comp[fcomp_norm_unfold]: "hr_comp (list_assn A) (⟨B⟩list_rel) = list_assn (hr_comp A B)"
proof (intro ext)
{ fix x l y m
have "hr_comp (list_assn A) (⟨B⟩list_rel) (x # l) (y # m) =
hr_comp A B x y * hr_comp (list_assn A) (⟨B⟩list_rel) l m"
by (sep_auto
simp: hr_comp_def list_rel_split_left_iff
intro!: ent_ex_preI ent_iffI)
} note aux = this
fix l li
show "hr_comp (list_assn A) (⟨B⟩list_rel) l li = list_assn (hr_comp A B) l li"
apply (induction l arbitrary: li; case_tac li; intro ent_iffI)
apply (sep_auto simp: hr_comp_def; fail)+
by (simp_all add: aux)
qed
lemma hn_ctxt_eq: "A x y = z ⟹ hn_ctxt A x y = z" by (simp add: hn_ctxt_def)
lemmas hn_ctxt_list = hn_ctxt_eq[of "list_assn A" for A]
lemma hn_case_list[sepref_prep_comb_rule, sepref_comb_rules]:
fixes p p' P
defines [simp]: "INVE ≡ hn_invalid (list_assn P) p p'"
assumes FR: "Γ ⟹⇩t hn_ctxt (list_assn P) p p' * F"
assumes Rn: "p=[] ⟹ hn_refine (hn_ctxt (list_assn P) p p' * F) f1' (hn_ctxt XX1 p p' * Γ1') R f1"
assumes Rs: "⋀x l x' l'. ⟦ p=x#l; p'=x'#l' ⟧ ⟹
hn_refine (hn_ctxt P x x' * hn_ctxt (list_assn P) l l' * INVE * F) (f2' x' l') (hn_ctxt P1' x x' * hn_ctxt (list_assn P2') l l' * hn_ctxt XX2 p p' * Γ2') R (f2 x l)"
assumes MERGE1[unfolded hn_ctxt_def]: "⋀x x'. hn_ctxt P1' x x' ∨⇩A hn_ctxt P2' x x' ⟹⇩t hn_ctxt P' x x'"
assumes MERGE2: "Γ1' ∨⇩A Γ2' ⟹⇩t Γ'"
shows "hn_refine Γ (case_list f1' f2' p') (hn_ctxt (list_assn P') p p' * Γ') R (case_list$f1$(λ⇩2x l. f2 x l)$p)"
apply (rule hn_refine_cons_pre[OF FR])
apply1 extract_hnr_invalids
apply (cases p; cases p'; simp add: list_assn.simps[THEN hn_ctxt_list])
subgoal
apply (rule hn_refine_cons[OF _ Rn _ entt_refl]; assumption?)
applyS (simp add: hn_ctxt_def)
apply (subst mult.commute, rule entt_fr_drop)
apply (rule entt_trans[OF _ MERGE2])
apply (simp add: ent_disjI1' ent_disjI2')
done
subgoal
apply (rule hn_refine_cons[OF _ Rs _ entt_refl]; assumption?)
applyS (simp add: hn_ctxt_def)
apply (rule entt_star_mono)
apply1 (rule entt_fr_drop)
apply (rule entt_star_mono)
apply1 (simp add: hn_ctxt_def)
apply1 (rule entt_trans[OF _ MERGE1])
applyS (simp)
apply1 (simp add: hn_ctxt_def)
apply (rule list_assn_monot)
apply1 (rule entt_trans[OF _ MERGE1])
applyS (simp)
apply1 (rule entt_trans[OF _ MERGE2])
applyS (simp)
done
done
lemma hn_Nil[sepref_fr_rules]:
"hn_refine emp (return []) emp (list_assn P) (RETURN$[])"
unfolding hn_refine_def
by sep_auto
lemma hn_Cons[sepref_fr_rules]: "hn_refine (hn_ctxt P x x' * hn_ctxt (list_assn P) xs xs')
(return (x'#xs')) (hn_invalid P x x' * hn_invalid (list_assn P) xs xs') (list_assn P)
(RETURN$((#) $x$xs))"
unfolding hn_refine_def
apply (sep_auto simp: hn_ctxt_def)
apply (rule ent_frame_fwd[OF invalidate_clone'[of P]], frame_inference)
apply (rule ent_frame_fwd[OF invalidate_clone'[of "list_assn P"]], frame_inference)
apply solve_entails
done
lemma list_assn_aux_len:
"list_assn P l l' = list_assn P l l' * ↑(length l = length l')"
apply (induct P≡P l l' rule: list_assn.induct)
apply simp_all
subgoal for a as c cs
by (erule_tac t="list_assn P as cs" in subst[OF sym]) simp
done
lemma list_assn_aux_eqlen_simp:
"vassn_tag (list_assn P l l') ⟹ length l' = length l"
"h ⊨ (list_assn P l l') ⟹ length l' = length l"
apply (subst (asm) list_assn_aux_len; auto simp: vassn_tag_def)+
done
lemma hn_append[sepref_fr_rules]: "hn_refine (hn_ctxt (list_assn P) l1 l1' * hn_ctxt (list_assn P) l2 l2')
(return (l1'@l2')) (hn_invalid (list_assn P) l1 l1' * hn_invalid (list_assn P) l2 l2') (list_assn P)
(RETURN$((@) $l1$l2))"
apply rule
apply (sep_auto simp: hn_ctxt_def)
apply (subst list_assn_aux_len)
apply (sep_auto)
apply (rule ent_frame_fwd[OF invalidate_clone'[of "list_assn P"]], frame_inference)
apply (rule ent_frame_fwd[OF invalidate_clone'[of "list_assn P"]], frame_inference)
apply solve_entails
done
lemma list_assn_aux_cons_conv1:
"list_assn R (a#l) m = (∃⇩Ab m'. R a b * list_assn R l m' * ↑(m=b#m'))"
apply (cases m)
apply sep_auto
apply (sep_auto intro!: ent_iffI)
done
lemma list_assn_aux_cons_conv2:
"list_assn R l (b#m) = (∃⇩Aa l'. R a b * list_assn R l' m * ↑(l=a#l'))"
apply (cases l)
apply sep_auto
apply (sep_auto intro!: ent_iffI)
done
lemmas list_assn_aux_cons_conv = list_assn_aux_cons_conv1 list_assn_aux_cons_conv2
lemma list_assn_aux_append_conv1:
"list_assn R (l1@l2) m = (∃⇩Am1 m2. list_assn R l1 m1 * list_assn R l2 m2 * ↑(m=m1@m2))"
apply (induction l1 arbitrary: m)
apply (sep_auto intro!: ent_iffI)
apply (sep_auto intro!: ent_iffI simp: list_assn_aux_cons_conv)
done
lemma list_assn_aux_append_conv2:
"list_assn R l (m1@m2) = (∃⇩Al1 l2. list_assn R l1 m1 * list_assn R l2 m2 * ↑(l=l1@l2))"
apply (induction m1 arbitrary: l)
apply (sep_auto intro!: ent_iffI)
apply (sep_auto intro!: ent_iffI simp: list_assn_aux_cons_conv)
done
lemmas list_assn_aux_append_conv = list_assn_aux_append_conv1 list_assn_aux_append_conv2
declare param_upt[sepref_import_param]
subsection ‹Sum-Type›
fun sum_assn :: "('ai ⇒ 'a ⇒ assn) ⇒ ('bi ⇒ 'b ⇒ assn) ⇒ ('ai+'bi) ⇒ ('a+'b) ⇒ assn" where
"sum_assn A B (Inl ai) (Inl a) = A ai a"
| "sum_assn A B (Inr bi) (Inr b) = B bi b"
| "sum_assn A B _ _ = false"
notation sum_assn (infixr "+⇩a" 67)
lemma sum_assn_pure[safe_constraint_rules]: "⟦is_pure A; is_pure B⟧ ⟹ is_pure (sum_assn A B)"
apply (auto simp: is_pure_iff_pure_assn)
apply (rename_tac x x')
apply (case_tac x; case_tac x'; simp add: pure_def)
done
lemma sum_assn_id[simp]: "sum_assn id_assn id_assn = id_assn"
apply (intro ext)
subgoal for x y by (cases x; cases y; simp add: pure_def)
done
lemma sum_assn_pure_conv[simp]: "sum_assn (pure A) (pure B) = pure (⟨A,B⟩sum_rel)"
apply (intro ext)
subgoal for a b by (cases a; cases b; auto simp: pure_def)
done
lemma sum_match_cong[sepref_frame_match_rules]:
"⟦
⋀x y. ⟦e = Inl x; e'=Inl y⟧ ⟹ hn_ctxt A x y ⟹⇩t hn_ctxt A' x y;
⋀x y. ⟦e = Inr x; e'=Inr y⟧ ⟹ hn_ctxt B x y ⟹⇩t hn_ctxt B' x y
⟧ ⟹ hn_ctxt (sum_assn A B) e e' ⟹⇩t hn_ctxt (sum_assn A' B') e e'"
by (cases e; cases e'; simp add: hn_ctxt_def entt_star_mono)
lemma enum_merge_cong[sepref_frame_merge_rules]:
assumes "⋀x y. ⟦e=Inl x; e'=Inl y⟧ ⟹ hn_ctxt A x y ∨⇩A hn_ctxt A' x y ⟹⇩t hn_ctxt Am x y"
assumes "⋀x y. ⟦e=Inr x; e'=Inr y⟧ ⟹ hn_ctxt B x y ∨⇩A hn_ctxt B' x y ⟹⇩t hn_ctxt Bm x y"
shows "hn_ctxt (sum_assn A B) e e' ∨⇩A hn_ctxt (sum_assn A' B') e e' ⟹⇩t hn_ctxt (sum_assn Am Bm) e e'"
apply (rule entt_disjE)
apply (rule sum_match_cong)
apply (rule entt_disjD1[OF assms(1)]; simp)
apply (rule entt_disjD1[OF assms(2)]; simp)
apply (rule sum_match_cong)
apply (rule entt_disjD2[OF assms(1)]; simp)
apply (rule entt_disjD2[OF assms(2)]; simp)
done
lemma entt_invalid_sum: "hn_invalid (sum_assn A B) e e' ⟹⇩t hn_ctxt (sum_assn (invalid_assn A) (invalid_assn B)) e e'"
apply (simp add: hn_ctxt_def invalid_assn_def[abs_def])
apply (rule enttI)
apply clarsimp
apply (cases e; cases e'; auto simp: mod_star_conv pure_def)
done
lemmas invalid_sum_merge[sepref_frame_merge_rules] = gen_merge_cons[OF entt_invalid_sum]
sepref_register Inr Inl
lemma [sepref_fr_rules]: "(return o Inl,RETURN o Inl) ∈ A⇧d →⇩a sum_assn A B"
by sepref_to_hoare sep_auto
lemma [sepref_fr_rules]: "(return o Inr,RETURN o Inr) ∈ B⇧d →⇩a sum_assn A B"
by sepref_to_hoare sep_auto
sepref_register case_sum
text ‹In the monadify phase, this eta-expands to make visible all required arguments›
lemma [sepref_monadify_arity]: "case_sum ≡ λ⇩2f1 f2 x. SP case_sum$(λ⇩2x. f1$x)$(λ⇩2x. f2$x)$x"
by simp
text ‹This determines an evaluation order for the first-order operands›
lemma [sepref_monadify_comb]: "case_sum$f1$f2$x ≡ (⤜) $(EVAL$x)$(λ⇩2x. SP case_sum$f1$f2$x)" by simp
text ‹This enables translation of the case-distinction in a non-monadic context.›
lemma [sepref_monadify_comb]: "EVAL$(case_sum$(λ⇩2x. f1 x)$(λ⇩2x. f2 x)$x)
≡ (⤜) $(EVAL$x)$(λ⇩2x. SP case_sum$(λ⇩2x. EVAL $ f1 x)$(λ⇩2x. EVAL $ f2 x)$x)"
apply (rule eq_reflection)
by (simp split: sum.splits)
text ‹Auxiliary lemma, to lift simp-rule over ‹hn_ctxt››
lemma sum_assn_ctxt: "sum_assn A B x y = z ⟹ hn_ctxt (sum_assn A B) x y = z"
by (simp add: hn_ctxt_def)
text ‹The cases lemma first extracts the refinement for the datatype from the precondition.
Next, it generate proof obligations to refine the functions for every case.
Finally the postconditions of the refinement are merged.
Note that we handle the
destructed values separately, to allow reconstruction of the original datatype after the case-expression.
Moreover, we provide (invalidated) versions of the original compound value to the cases,
which allows access to pure compound values from inside the case.
›
lemma sum_cases_hnr:
fixes A B e e'
defines [simp]: "INVe ≡ hn_invalid (sum_assn A B) e e'"
assumes FR: "Γ ⟹⇩t hn_ctxt (sum_assn A B) e e' * F"
assumes E1: "⋀x1 x1a. ⟦e = Inl x1; e' = Inl x1a⟧ ⟹ hn_refine (hn_ctxt A x1 x1a * INVe * F) (f1' x1a) (hn_ctxt A' x1 x1a * hn_ctxt XX1 e e' * Γ1') R (f1 x1)"
assumes E2: "⋀x2 x2a. ⟦e = Inr x2; e' = Inr x2a⟧ ⟹ hn_refine (hn_ctxt B x2 x2a * INVe * F) (f2' x2a) (hn_ctxt B' x2 x2a * hn_ctxt XX2 e e' * Γ2') R (f2 x2)"
assumes MERGE[unfolded hn_ctxt_def]: "Γ1' ∨⇩A Γ2' ⟹⇩t Γ'"
shows "hn_refine Γ (case_sum f1' f2' e') (hn_ctxt (sum_assn A' B') e e' * Γ') R (case_sum$(λ⇩2x. f1 x)$(λ⇩2x. f2 x)$e)"
apply (rule hn_refine_cons_pre[OF FR])
apply1 extract_hnr_invalids
apply (cases e; cases e'; simp add: sum_assn.simps[THEN sum_assn_ctxt])
subgoal
apply (rule hn_refine_cons[OF _ E1 _ entt_refl]; assumption?)
applyS (simp add: hn_ctxt_def)
apply (rule entt_star_mono)
apply1 (rule entt_fr_drop)
applyS (simp add: hn_ctxt_def entt_disjI1' entt_disjI2')
apply1 (rule entt_trans[OF _ MERGE])
applyS (simp add: entt_disjI1' entt_disjI2')
done
subgoal
apply (rule hn_refine_cons[OF _ E2 _ entt_refl]; assumption?)
applyS (simp add: hn_ctxt_def)
apply (rule entt_star_mono)
apply1 (rule entt_fr_drop)
applyS (simp add: hn_ctxt_def entt_disjI1' entt_disjI2')
apply1 (rule entt_trans[OF _ MERGE])
applyS (simp add: entt_disjI1' entt_disjI2')
done
done
text ‹After some more preprocessing (adding extra frame-rules for non-atomic postconditions,
and splitting the merge-terms into binary merges), this rule can be registered›
lemmas [sepref_comb_rules] = sum_cases_hnr[sepref_prep_comb_rule]
sepref_register isl projl projr
lemma isl_hnr[sepref_fr_rules]: "(return o isl,RETURN o isl) ∈ (sum_assn A B)⇧k →⇩a bool_assn"
apply sepref_to_hoare
subgoal for a b by (cases a; cases b; sep_auto)
done
lemma projl_hnr[sepref_fr_rules]: "(return o projl,RETURN o projl) ∈ [isl]⇩a (sum_assn A B)⇧d → A"
apply sepref_to_hoare
subgoal for a b by (cases a; cases b; sep_auto)
done
lemma projr_hnr[sepref_fr_rules]: "(return o projr,RETURN o projr) ∈ [Not o isl]⇩a (sum_assn A B)⇧d → B"
apply sepref_to_hoare
subgoal for a b by (cases a; cases b; sep_auto)
done
subsection ‹String Literals›
sepref_register "PR_CONST String.empty_literal"
lemma empty_literal_hnr [sepref_import_param]:
"(String.empty_literal, PR_CONST String.empty_literal) ∈ Id"
by simp
lemma empty_literal_pat [def_pat_rules]:
"String.empty_literal ≡ UNPROTECT String.empty_literal"
by simp
context
fixes b0 b1 b2 b3 b4 b5 b6 :: bool
and s :: String.literal
begin
sepref_register "PR_CONST (String.Literal b0 b1 b2 b3 b4 b5 b6 s)"
lemma Literal_hnr [sepref_import_param]:
"(String.Literal b0 b1 b2 b3 b4 b5 b6 s,
PR_CONST (String.Literal b0 b1 b2 b3 b4 b5 b6 s)) ∈ Id"
by simp
end
lemma Literal_pat [def_pat_rules]:
"String.Literal $ b0 $ b1 $ b2 $ b3 $ b4 $ b5 $ b6 $ s ≡
UNPROTECT (String.Literal $ b0 $ b1 $ b2 $ b3 $ b4 $ b5 $ b6 $ s)"
by simp
end
Theory Sepref_Foreach
section ‹Setup for Foreach Combinator›
theory Sepref_Foreach
imports Sepref_HOL_Bindings "Lib/Pf_Add" "HOL-Library.Rewrite"
begin
subsection "Foreach Loops"
subsubsection "Monadic Version of Foreach"
text ‹
In a first step, we define a version of foreach where the continuation condition
is also monadic, and show that it is equal to the standard version for
continuation conditions of the form ‹λx. RETURN (c x)›
›
definition "FOREACH_inv xs Φ s ≡
case s of (it, σ) ⇒ ∃xs'. xs = xs' @ it ∧ Φ (set it) σ"
definition "monadic_FOREACH R Φ S c f σ0 ≡ do {
ASSERT (finite S);
xs0 ← it_to_sorted_list R S;
(_,σ) ← RECT (λW (xs,σ). do {
ASSERT (FOREACH_inv xs0 Φ (xs,σ));
if xs≠[] then do {
b ← c σ;
if b then
FOREACH_body f (xs,σ) ⤜ W
else
RETURN (xs,σ)
} else RETURN (xs,σ)
}) (xs0,σ0);
RETURN σ
}"
lemma FOREACH_oci_to_monadic:
"FOREACHoci R Φ S c f σ0 = monadic_FOREACH R Φ S (λσ. RETURN (c σ)) f σ0"
unfolding FOREACHoci_def monadic_FOREACH_def WHILEIT_def WHILEI_body_def
unfolding it_to_sorted_list_def FOREACH_cond_def FOREACH_inv_def
apply simp
apply (fo_rule arg_cong[THEN cong] | rule refl ext)+
apply (simp split: prod.split)
apply (rule refl)+
done
text ‹Next, we define a characterization w.r.t. ‹nfoldli››
definition "monadic_nfoldli l c f s ≡ RECT (λD (l,s). case l of
[] ⇒ RETURN s
| x#ls ⇒ do {
b ← c s;
if b then do { s'←f x s; D (ls,s')} else RETURN s
}
) (l,s)"
lemma monadic_nfoldli_eq:
"monadic_nfoldli l c f s = (
case l of
[] ⇒ RETURN s
| x#ls ⇒ do {
b←c s;
if b then f x s ⤜ monadic_nfoldli ls c f else RETURN s
}
)"
apply (subst monadic_nfoldli_def)
apply (subst RECT_unfold)
apply (tagged_solver)
apply (subst monadic_nfoldli_def[symmetric])
apply simp
done
lemma monadic_nfoldli_simp[simp]:
"monadic_nfoldli [] c f s = RETURN s"
"monadic_nfoldli (x#ls) c f s = do {
b←c s;
if b then f x s ⤜ monadic_nfoldli ls c f else RETURN s
}"
apply (subst monadic_nfoldli_eq, simp)
apply (subst monadic_nfoldli_eq, simp)
done
lemma nfoldli_to_monadic:
"nfoldli l c f = monadic_nfoldli l (λx. RETURN (c x)) f"
apply (induct l)
apply auto
done
definition "nfoldli_alt l c f s ≡ RECT (λD (l,s). case l of
[] ⇒ RETURN s
| x#ls ⇒ do {
let b = c s;
if b then do { s'←f x s; D (ls,s')} else RETURN s
}
) (l,s)"
lemma nfoldli_alt_eq:
"nfoldli_alt l c f s = (
case l of
[] ⇒ RETURN s
| x#ls ⇒ do {let b=c s; if b then f x s ⤜ nfoldli_alt ls c f else RETURN s}
)"
apply (subst nfoldli_alt_def)
apply (subst RECT_unfold)
apply (tagged_solver)
apply (subst nfoldli_alt_def[symmetric])
apply simp
done
lemma nfoldli_alt_simp[simp]:
"nfoldli_alt [] c f s = RETURN s"
"nfoldli_alt (x#ls) c f s = do {
let b = c s;
if b then f x s ⤜ nfoldli_alt ls c f else RETURN s
}"
apply (subst nfoldli_alt_eq, simp)
apply (subst nfoldli_alt_eq, simp)
done
lemma nfoldli_alt:
"(nfoldli::'a list ⇒ ('b ⇒ bool) ⇒ ('a ⇒ 'b ⇒ 'b nres) ⇒ 'b ⇒ 'b nres)
= nfoldli_alt"
proof (intro ext)
fix l::"'a list" and c::"'b ⇒ bool" and f::"'a ⇒ 'b ⇒ 'b nres" and s :: 'b
have "nfoldli l c f = nfoldli_alt l c f"
by (induct l) auto
thus "nfoldli l c f s = nfoldli_alt l c f s" by simp
qed
lemma monadic_nfoldli_rec:
"monadic_nfoldli x' c f σ
≤⇓Id (REC⇩T
(λW (xs, σ).
ASSERT (FOREACH_inv xs0 I (xs, σ)) ⤜
(λ_. if xs = [] then RETURN (xs, σ)
else c σ ⤜
(λb. if b then FOREACH_body f (xs, σ) ⤜ W
else RETURN (xs, σ))))
(x', σ) ⤜
(λ(_, y). RETURN y))"
apply (induct x' arbitrary: σ)
apply (subst RECT_unfold, refine_mono)
apply (simp)
apply (rule le_ASSERTI)
apply simp
apply (subst RECT_unfold, refine_mono)
apply (subst monadic_nfoldli_simp)
apply (simp del: conc_Id cong: if_cong)
apply refine_rcg
apply simp
apply (clarsimp simp add: FOREACH_body_def)
apply (rule_tac R="br (Pair x') (λ_. True)" in intro_prgR)
apply (simp add: pw_le_iff refine_pw_simps br_def)
apply (rule order_trans)
apply rprems
apply (simp add: br_def)
done
lemma monadic_nfoldli_arities[sepref_monadify_arity]:
"monadic_nfoldli ≡ λ⇩2s c f σ. SP (monadic_nfoldli)$s$(λ⇩2x. c$x)$(λ⇩2x σ. f$x$σ)$σ"
by (simp_all)
lemma monadic_nfoldli_comb[sepref_monadify_comb]:
"⋀s c f σ. (monadic_nfoldli)$s$c$f$σ ≡
Refine_Basic.bind$(EVAL$s)$(λ⇩2s. Refine_Basic.bind$(EVAL$σ)$(λ⇩2σ.
SP (monadic_nfoldli)$s$c$f$σ
))"
by (simp_all)
lemma list_rel_congD:
assumes A: "(li,l)∈⟨S⟩list_rel"
shows "(li,l)∈⟨S∩(set li×set l)⟩list_rel"
proof -
{
fix Si0 S0
assume "set li ⊆ Si0" "set l ⊆ S0"
with A have "(li,l)∈⟨S∩(Si0×S0)⟩list_rel"
by (induction rule: list_rel_induct) auto
} from this[OF order_refl order_refl] show ?thesis .
qed
lemma monadic_nfoldli_refine[refine]:
assumes L: "(li, l) ∈ ⟨S⟩list_rel"
and [simp]: "(si, s) ∈ R"
and CR[refine]: "⋀si s. (si,s)∈R ⟹ ci si ≤⇓bool_rel (c s)"
and [refine]: "⋀xi x si s. ⟦ (xi,x)∈S; x∈set l; (si,s)∈R; inres (c s) True ⟧ ⟹ fi xi si ≤ ⇓R (f x s)"
shows "monadic_nfoldli li ci fi si ≤ ⇓ R (monadic_nfoldli l c f s)"
supply RELATESI[of "S∩(set li×set l)", refine_dref_RELATES]
supply RELATESI[of R, refine_dref_RELATES]
unfolding monadic_nfoldli_def
apply (refine_rcg bind_refine')
apply refine_dref_type
apply (vc_solve simp: list_rel_congD[OF L])
done
lemma monadic_FOREACH_itsl:
fixes R I tsl
shows
"do { l ← it_to_sorted_list R s; monadic_nfoldli l c f σ }
≤ monadic_FOREACH R I s c f σ"
apply (rule refine_IdD)
unfolding monadic_FOREACH_def it_to_sorted_list_def
apply (refine_rcg)
apply simp
apply (rule monadic_nfoldli_rec[simplified])
done
lemma FOREACHoci_itsl:
fixes R I tsl
shows
"do { l ← it_to_sorted_list R s; nfoldli l c f σ }
≤ FOREACHoci R I s c f σ"
apply (rule refine_IdD)
unfolding FOREACHoci_def it_to_sorted_list_def
apply refine_rcg
apply simp
apply (rule nfoldli_while)
done
lemma [def_pat_rules]:
"FOREACHc ≡ PR_CONST (FOREACHoci (λ_ _. True) (λ_ _. True))"
"FOREACHci$I ≡ PR_CONST (FOREACHoci (λ_ _. True) I)"
"FOREACHi$I ≡ λ⇩2s. PR_CONST (FOREACHoci (λ_ _. True) I)$s$(λ⇩2x. True)"
"FOREACH ≡ FOREACHi$(λ⇩2_ _. True)"
by (simp_all add:
FOREACHci_def FOREACHi_def[abs_def] FOREACHc_def FOREACH_def[abs_def])
term "FOREACHoci R I"
lemma id_FOREACHoci[id_rules]: "PR_CONST (FOREACHoci R I) ::⇩i
TYPE('c set ⇒ ('d ⇒ bool) ⇒ ('c ⇒ 'd ⇒ 'd nres) ⇒ 'd ⇒ 'd nres)"
by simp
text ‹We set up the monadify-phase such that all FOREACH-loops get
rewritten to the monadic version of FOREACH›
lemma FOREACH_arities[sepref_monadify_arity]:
"PR_CONST (FOREACHoci R I) ≡ λ⇩2s c f σ. SP (PR_CONST (FOREACHoci R I))$s$(λ⇩2x. c$x)$(λ⇩2x σ. f$x$σ)$σ"
by (simp_all)
lemma FOREACHoci_comb[sepref_monadify_comb]:
"⋀s c f σ. (PR_CONST (FOREACHoci R I))$s$(λ⇩2x. c x)$f$σ ≡
Refine_Basic.bind$(EVAL$s)$(λ⇩2s. Refine_Basic.bind$(EVAL$σ)$(λ⇩2σ.
SP (PR_CONST (monadic_FOREACH R I))$s$(λ⇩2x. (EVAL$(c x)))$f$σ
))"
by (simp_all add: FOREACH_oci_to_monadic)
subsubsection "Imperative Version of nfoldli"
text ‹We define an imperative version of ‹nfoldli›. It is the
equivalent to the monadic version in the nres-monad›
definition "imp_nfoldli l c f s ≡ heap.fixp_fun (λD (l,s). case l of
[] ⇒ return s
| x#ls ⇒ do {
b←c s;
if b then do { s'←f x s; D (ls,s')} else return s
}
) (l,s)"
declare imp_nfoldli_def[code del]
lemma imp_nfoldli_simps[simp,code]:
"imp_nfoldli [] c f s = return s"
"imp_nfoldli (x#ls) c f s = (do {
b ← c s;
if b then do {
s'←f x s;
imp_nfoldli ls c f s'
} else return s
})"
apply -
unfolding imp_nfoldli_def
apply (subst heap.mono_body_fixp)
apply pf_mono
apply simp
apply (subst heap.mono_body_fixp)
apply pf_mono
apply simp
done
lemma monadic_nfoldli_refine_aux:
assumes c_ref: "⋀s s'. hn_refine
(Γ * hn_ctxt Rs s' s)
(c s)
(Γ * hn_ctxt Rs s' s)
bool_assn
(c' s')"
assumes f_ref: "⋀x x' s s'. hn_refine
(Γ * hn_ctxt Rl x' x * hn_ctxt Rs s' s)
(f x s)
(Γ * hn_invalid Rl x' x * hn_invalid Rs s' s) Rs
(f' x' s')"
shows "hn_refine
(Γ * hn_ctxt (list_assn Rl) l' l * hn_ctxt Rs s' s)
(imp_nfoldli l c f s)
(Γ * hn_invalid (list_assn Rl) l' l * hn_invalid Rs s' s) Rs
(monadic_nfoldli l' c' f' s')"
applyF (induct p≡Rl l' l
arbitrary: s s'
rule: list_assn.induct)
applyF simp
apply (rule hn_refine_cons_post)
apply (rule hn_refine_frame[OF hnr_RETURN_pass])
apply (tactic ‹Sepref_Frame.frame_tac (K (K no_tac)) @{context} 1›)
apply (simp add: hn_ctxt_def ent_true_drop invalid_assn_const)
solved
apply1 weaken_hnr_post
apply1 (simp only: imp_nfoldli_simps monadic_nfoldli_simp)
applyF (rule hnr_bind)
apply1 (rule hn_refine_frame[OF c_ref])
applyS (tactic ‹Sepref_Frame.frame_tac (K (K no_tac)) @{context} 1›)
applyF (rule hnr_If)
applyS (tactic ‹Sepref_Frame.frame_tac (K (K no_tac)) @{context} 1›)
applyF (rule hnr_bind)
apply1 (rule hn_refine_frame[OF f_ref])
apply1 (simp add: assn_assoc)
apply1 (rule ent_imp_entt)
apply1 (fr_rot 1, rule fr_refl)
apply1 (fr_rot 2, rule fr_refl)
apply1 (fr_rot 1, rule fr_refl)
applyS (rule ent_refl)
applyF (rule hn_refine_frame)
applyS rprems
apply1 (simp add: assn_assoc)
apply1 (rule ent_imp_entt)
apply (rule fr_refl)
apply1 (fr_rot 3, rule fr_refl)
apply1 (fr_rot 3, rule fr_refl)
applyS (rule ent_refl)
solved
apply simp
applyS (tactic ‹Sepref_Frame.frame_tac (K (K no_tac)) @{context} 1›)
solved
apply1 (rule hn_refine_frame[OF hnr_RETURN_pass])
applyS (tactic ‹Sepref_Frame.frame_tac (K (K no_tac)) @{context} 1›)
apply1 (simp add: assn_assoc)
applyS (tactic ‹Sepref_Frame.merge_tac (K (K no_tac)) @{context} 1›)
solved
apply (rule enttI)
apply (fr_rot_rhs 1)
apply (fr_rot 3, rule fr_refl)
applyS (fr_rot 3, rule ent_star_mono[rotated]; sep_auto simp: hn_ctxt_def)
solved
applyS (simp add: hn_ctxt_def invalid_assn_def)
applyS (rule, sep_auto)
solved
done
lemma hn_monadic_nfoldli:
assumes FR: "P ⟹⇩t Γ * hn_ctxt (list_assn Rl) l' l * hn_ctxt Rs s' s"
assumes c_ref: "⋀s s'. hn_refine
(Γ * hn_ctxt Rs s' s)
(c s)
(Γ * hn_ctxt Rs s' s)
bool_assn
(c'$s')"
assumes f_ref: "⋀x x' s s'. hn_refine
(Γ * hn_ctxt Rl x' x * hn_ctxt Rs s' s)
(f x s)
(Γ * hn_invalid Rl x' x * hn_invalid Rs s' s) Rs
(f'$x'$s')"
shows "hn_refine
P
(imp_nfoldli l c f s)
(Γ * hn_invalid (list_assn Rl) l' l * hn_invalid Rs s' s)
Rs
(monadic_nfoldli$l'$c'$f'$s')
"
apply (rule hn_refine_cons_pre[OF FR])
unfolding APP_def
apply (rule monadic_nfoldli_refine_aux)
apply (rule c_ref[unfolded APP_def])
apply (rule f_ref[unfolded APP_def])
done
definition
imp_foreach :: "('b ⇒ 'c list Heap) ⇒ 'b ⇒ ('a ⇒ bool Heap) ⇒ ('c ⇒ 'a ⇒ 'a Heap) ⇒ 'a ⇒ 'a Heap"
where
"imp_foreach tsl s c f σ ≡ do { l ← tsl s; imp_nfoldli l c f σ}"
lemma heap_fixp_mono[partial_function_mono]:
assumes [partial_function_mono]:
"⋀x d. mono_Heap (λxa. B x xa d)"
"⋀Z xa. mono_Heap (λa. B a Z xa)"
shows "mono_Heap (λx. heap.fixp_fun (λD σ. B x D σ) σ)"
apply rule
apply (rule ccpo.fixp_mono[OF heap.ccpo, THEN fun_ordD])
apply (rule mono_fun_fun_cnv, erule thin_rl, pf_mono)+
apply (rule fun_ordI)
apply (erule monotoneD[of "fun_ord Heap_ord" Heap_ord, rotated])
apply pf_mono
done
lemma imp_nfoldli_mono[partial_function_mono]:
assumes [partial_function_mono]: "⋀x σ. mono_Heap (λfa. f fa x σ)"
shows "mono_Heap (λx. imp_nfoldli l c (f x) σ)"
unfolding imp_nfoldli_def
by pf_mono
lemma imp_foreach_mono[partial_function_mono]:
assumes [partial_function_mono]: "⋀x σ. mono_Heap (λfa. f fa x σ)"
shows "mono_Heap (λx. imp_foreach tsl l c (f x) σ)"
unfolding imp_foreach_def
by pf_mono
lemmas [sepref_opt_simps] = imp_foreach_def
definition
"IS_TO_SORTED_LIST Ω Rs Rk tsl ≡ (tsl,it_to_sorted_list Ω) ∈ (Rs)⇧k →⇩a list_assn Rk"
lemma IS_TO_SORTED_LISTI:
assumes "(tsl,PR_CONST (it_to_sorted_list Ω)) ∈ (Rs)⇧k →⇩a list_assn Rk"
shows "IS_TO_SORTED_LIST Ω Rs Rk tsl"
using assms unfolding IS_TO_SORTED_LIST_def PR_CONST_def .
lemma hn_monadic_FOREACH[sepref_comb_rules]:
assumes "INDEP Rk" "INDEP Rs" "INDEP Rσ"
assumes FR: "P ⟹⇩t Γ * hn_ctxt Rs s' s * hn_ctxt Rσ σ' σ"
assumes STL: "GEN_ALGO tsl (IS_TO_SORTED_LIST ordR Rs Rk)"
assumes c_ref: "⋀σ σ'. hn_refine
(Γ * hn_ctxt Rs s' s * hn_ctxt Rσ σ' σ)
(c σ)
(Γc σ' σ)
bool_assn
(c' σ')"
assumes C_FR:
"⋀σ' σ. TERM monadic_FOREACH ⟹
Γc σ' σ ⟹⇩t Γ * hn_ctxt Rs s' s * hn_ctxt Rσ σ' σ"
assumes f_ref: "⋀x' x σ' σ. hn_refine
(Γ * hn_ctxt Rs s' s * hn_ctxt Rk x' x * hn_ctxt Rσ σ' σ)
(f x σ)
(Γf x' x σ' σ) Rσ
(f' x' σ')"
assumes F_FR: "⋀x' x σ' σ. TERM monadic_FOREACH ⟹ Γf x' x σ' σ ⟹⇩t
Γ * hn_ctxt Rs s' s * hn_ctxt Pfx x' x * hn_ctxt Pfσ σ' σ"
shows "hn_refine
P
(imp_foreach tsl s c f σ)
(Γ * hn_ctxt Rs s' s * hn_invalid Rσ σ' σ)
Rσ
((PR_CONST (monadic_FOREACH ordR I))
$s'$(λ⇩2σ'. c' σ')$(λ⇩2x' σ'. f' x' σ')$σ'
)"
proof -
from STL have STL: "(tsl,it_to_sorted_list ordR) ∈ (Rs)⇧k →⇩a list_assn Rk"
unfolding GEN_ALGO_def IS_TO_SORTED_LIST_def by simp
show ?thesis
apply (rule hn_refine_cons_pre[OF FR])
apply weaken_hnr_post
unfolding APP_def PROTECT2_def PR_CONST_def imp_foreach_def
apply (rule hn_refine_ref[OF monadic_FOREACH_itsl])
apply (rule hn_refine_guessI)
apply (rule hnr_bind)
apply (rule hn_refine_frame)
apply (rule STL[to_hnr, unfolded APP_def])
apply (tactic ‹Sepref_Frame.frame_tac (K (K no_tac)) @{context} 1›)
apply (rule hn_monadic_nfoldli[unfolded APP_def])
apply (tactic ‹Sepref_Frame.frame_tac (K (K no_tac)) @{context} 1›)
apply (rule hn_refine_cons_post)
apply (rule c_ref[unfolded APP_def])
apply (rule C_FR)
apply (rule TERMI)
apply weaken_hnr_post
apply (rule hn_refine_cons_post)
apply (rule f_ref[unfolded APP_def])
apply (rule entt_trans[OF F_FR])
apply (rule TERMI)
applyS (tactic ‹Sepref_Frame.frame_tac (K (K no_tac)) @{context} 1›)
applyS (tactic ‹Sepref_Frame.frame_tac (K (K no_tac)) @{context} 1›)
apply simp
done
qed
lemma monadic_nfoldli_assert_aux:
assumes "set l ⊆ S"
shows "monadic_nfoldli l c (λx s. ASSERT (x∈S)⪢f x s) s = monadic_nfoldli l c f s"
using assms
apply (induction l arbitrary: s)
apply (auto simp: pw_eq_iff refine_pw_simps)
done
lemmas monadic_nfoldli_assert = monadic_nfoldli_assert_aux[OF order_refl]
lemma nfoldli_arities[sepref_monadify_arity]:
"nfoldli ≡ λ⇩2s c f σ. SP (nfoldli)$s$(λ⇩2x. c$x)$(λ⇩2x σ. f$x$σ)$σ"
by (simp_all)
lemma nfoldli_comb[sepref_monadify_comb]:
"⋀s c f σ. (nfoldli)$s$(λ⇩2x. c x)$f$σ ≡
Refine_Basic.bind$(EVAL$s)$(λ⇩2s. Refine_Basic.bind$(EVAL$σ)$(λ⇩2σ.
SP (monadic_nfoldli)$s$(λ⇩2x. (EVAL$(c x)))$f$σ
))"
by (simp_all add: nfoldli_to_monadic)
lemma monadic_nfoldli_refine_aux':
assumes SS: "set l' ⊆ S"
assumes c_ref: "⋀s s'. hn_refine
(Γ * hn_ctxt Rs s' s)
(c s)
(Γ * hn_ctxt Rs s' s)
bool_assn
(c' s')"
assumes f_ref: "⋀x x' s s'. ⟦x' ∈ S⟧ ⟹ hn_refine
(Γ * hn_ctxt Rl x' x * hn_ctxt Rs s' s)
(f x s)
(Γ * hn_ctxt Rl' x' x * hn_invalid Rs s' s) Rs
(f' x' s')"
assumes merge[sepref_frame_merge_rules]: "⋀x x'. hn_ctxt Rl' x' x ∨⇩A hn_ctxt Rl x' x ⟹⇩t hn_ctxt Rl'' x' x"
notes [sepref_frame_merge_rules] = merge_sat2[OF merge]
shows "hn_refine
(Γ * hn_ctxt (list_assn Rl) l' l * hn_ctxt Rs s' s)
(imp_nfoldli l c f s)
(Γ * hn_ctxt (list_assn Rl'') l' l * hn_invalid Rs s' s) Rs
(monadic_nfoldli l' c' f' s')"
apply1 (subst monadic_nfoldli_assert_aux[OF SS,symmetric])
applyF (induct p≡Rl l' l
arbitrary: s s'
rule: list_assn.induct)
applyF simp
apply (rule hn_refine_cons_post)
apply (rule hn_refine_frame[OF hnr_RETURN_pass])
apply (tactic ‹Sepref_Frame.frame_tac (K (K no_tac)) @{context} 1›)
apply (simp add: hn_ctxt_def ent_true_drop)
solved
apply (simp only: imp_nfoldli_simps monadic_nfoldli_simp)
apply (rule hnr_bind)
apply (rule hn_refine_frame[OF c_ref])
apply (tactic ‹Sepref_Frame.frame_tac (K (K no_tac)) @{context} 1›)
apply (rule hnr_If)
apply (tactic ‹Sepref_Frame.frame_tac (K (K no_tac)) @{context} 1›)
apply (simp only: nres_monad_laws)
apply (rule hnr_ASSERT)
apply (rule hnr_bind)
apply (rule hn_refine_frame[OF f_ref])
apply assumption
apply (simp add: assn_aci)
apply (rule ent_imp_entt)
apply (fr_rot_rhs 1)
apply (fr_rot 2)
apply (rule fr_refl)
apply (rule fr_refl)
apply (rule fr_refl)
apply (rule ent_refl)
applyF (rule hn_refine_frame)
applyS rprems
focus
apply (simp add: assn_aci)
apply (rule ent_imp_entt)
apply (fr_rot_rhs 1, rule fr_refl)
apply (fr_rot 2, rule fr_refl)
apply (fr_rot 1, rule fr_refl)
apply (rule ent_refl)
solved
solved
focus (simp add: assn_assoc)
apply (rule ent_imp_entt)
apply (rule fr_refl)
apply (rule ent_refl)
solved
apply1 (rule hn_refine_frame[OF hnr_RETURN_pass])
applyS (tactic ‹Sepref_Frame.frame_tac (K (K no_tac)) @{context} 1›)
apply1 (simp add: assn_assoc)
applyS (tactic ‹Sepref_Frame.merge_tac (K (K no_tac)) @{context} 1›)
apply simp
apply (rule ent_imp_entt)
apply solve_entails
apply (rule, sep_auto)
apply (rule, sep_auto)
solved
done
lemma hn_monadic_nfoldli_rl'[sepref_comb_rules]:
assumes "INDEP Rk" "INDEP Rσ"
assumes FR: "P ⟹⇩t Γ * hn_ctxt (list_assn Rk) s' s * hn_ctxt Rσ σ' σ"
assumes c_ref: "⋀σ σ'. hn_refine
(Γ * hn_ctxt Rσ σ' σ)
(c σ)
(Γc σ' σ)
bool_assn
(c' σ')"
assumes C_FR:
"⋀σ' σ. TERM monadic_nfoldli ⟹
Γc σ' σ ⟹⇩t Γ * hn_ctxt Rσ σ' σ"
assumes f_ref: "⋀x' x σ' σ. ⟦x'∈set s'⟧ ⟹ hn_refine
(Γ * hn_ctxt Rk x' x * hn_ctxt Rσ σ' σ)
(f x σ)
(Γf x' x σ' σ) Rσ
(f' x' σ')"
assumes F_FR: "⋀x' x σ' σ. TERM monadic_nfoldli ⟹ Γf x' x σ' σ ⟹⇩t
Γ * hn_ctxt Rk' x' x * hn_ctxt Pfσ σ' σ"
assumes MERGE: "⋀x x'. hn_ctxt Rk' x' x ∨⇩A hn_ctxt Rk x' x ⟹⇩t hn_ctxt Rk'' x' x"
shows "hn_refine
P
(imp_nfoldli s c f σ)
(Γ * hn_ctxt (list_assn Rk'') s' s * hn_invalid Rσ σ' σ)
Rσ
((monadic_nfoldli)
$s'$(λ⇩2σ'. c' σ')$(λ⇩2x' σ'. f' x' σ')$σ'
)"
unfolding APP_def PROTECT2_def PR_CONST_def
apply1 (rule hn_refine_cons_pre[OF FR])
apply1 weaken_hnr_post
applyF (rule hn_refine_cons[rotated])
applyF (rule monadic_nfoldli_refine_aux'[OF order_refl])
focus
apply (rule hn_refine_cons_post)
applyS (rule c_ref)
apply1 (rule entt_trans[OF C_FR[OF TERMI]])
applyS (rule entt_refl)
solved
apply1 weaken_hnr_post
applyF (rule hn_refine_cons_post)
applyS (rule f_ref; simp)
apply1 (rule entt_trans[OF F_FR[OF TERMI]])
applyS (tactic ‹Sepref_Frame.frame_tac (K (K no_tac)) @{context} 1›)
solved
apply (rule MERGE)
solved
applyS (tactic ‹Sepref_Frame.frame_tac (K (K no_tac)) @{context} 1›)
applyS (tactic ‹Sepref_Frame.frame_tac (K (K no_tac)) @{context} 1›)
applyS (tactic ‹Sepref_Frame.frame_tac (K (K no_tac)) @{context} 1›)
solved
done
lemma nfoldli_assert:
assumes "set l ⊆ S"
shows "nfoldli l c (λ x s. ASSERT (x ∈ S) ⪢ f x s) s = nfoldli l c f s"
using assms by (induction l arbitrary: s) (auto simp: pw_eq_iff refine_pw_simps)
lemmas nfoldli_assert' = nfoldli_assert[OF order.refl]
lemma fold_eq_nfoldli:
"RETURN (fold f l s) = nfoldli l (λ_. True) (λx s. RETURN (f x s)) s"
apply (induction l arbitrary: s) apply (auto) done
lemma fold_eq_nfoldli_assert:
"RETURN (fold f l s) = nfoldli l (λ_. True) (λx s. ASSERT (x∈set l) ⪢ RETURN (f x s)) s"
by (simp add: nfoldli_assert' fold_eq_nfoldli)
lemma fold_arity[sepref_monadify_arity]: "fold ≡ λ⇩2f l s. SP fold$(λ⇩2x s. f$x$s)$l$s" by auto
lemma monadify_plain_fold[sepref_monadify_comb]:
"EVAL$(fold$(λ⇩2x s. f x s)$l$s) ≡ (⤜)$(EVAL$l)$(λ⇩2l. (⤜)$(EVAL$s)$(λ⇩2s. nfoldli$l$(λ⇩2_. True)$(λ⇩2x s. EVAL$(f x s))$s))"
by (simp add: fold_eq_nfoldli)
lemma monadify_plain_fold_old_rl:
"EVAL$(fold$(λ⇩2x s. f x s)$l$s) ≡ (⤜)$(EVAL$l)$(λ⇩2l. (⤜)$(EVAL$s)$(λ⇩2s. nfoldli$l$(λ⇩2_. True)$(λ⇩2x s. PR_CONST (op_ASSERT_bind (x∈set l))$(EVAL$(f x s)))$s))"
by (simp add: fold_eq_nfoldli_assert)
text ‹foldli›
lemma foldli_eq_nfoldli:
"RETURN (foldli l c f s) = nfoldli l c (λx s. RETURN (f x s)) s"
by (induction l arbitrary: s) auto
lemma foldli_arities[sepref_monadify_arity]:
"foldli ≡ λ⇩2s c f σ. SP (foldli)$s$(λ⇩2x. c$x)$(λ⇩2x σ. f$x$σ)$σ"
by (simp_all)
lemma monadify_plain_foldli[sepref_monadify_comb]:
"EVAL$(foldli$l$c$(λ⇩2x s. f x s)$s) ≡
(⤜)$(EVAL$l)$
(λ⇩2l. (⤜)$(EVAL$s)$
(λ⇩2s. nfoldli$l$c$(λ⇩2x s. (EVAL$(f x s)))$s))"
by (simp add: foldli_eq_nfoldli)
subsubsection ‹Deforestation›
lemma nfoldli_filter_deforestation:
"nfoldli (filter P xs) c f s = nfoldli xs c (λx s. if P x then f x s else RETURN s) s"
apply (induction xs arbitrary: s)
by (auto simp: pw_eq_iff refine_pw_simps)
lemma extend_list_of_filtered_set:
assumes [simp, intro!]: "finite S"
and A: "distinct xs'" "set xs' = {x ∈ S. P x}"
obtains xs where "xs' = filter P xs" "distinct xs" "set xs = S"
proof -
obtain xs2 where "{x∈S. ¬P x} = set xs2" "distinct xs2"
using finite_distinct_list[where A="{x∈S. ¬P x}"] by auto
with A have "xs' = filter P (xs'@xs2)" "distinct (xs'@xs2)" "set (xs'@xs2) = S"
by (auto simp: filter_empty_conv)
from that[OF this] show ?thesis .
qed
lemma FOREACHc_filter_deforestation:
assumes FIN[simp, intro!]: "finite S"
shows "(FOREACHc {x∈S. P x} c f s)
= FOREACHc S c (λx s. if P x then f x s else RETURN s) s"
unfolding FOREACHc_def FOREACHci_def FOREACHoci_by_LIST_FOREACH LIST_FOREACH'_eq
LIST_FOREACH'_def it_to_sorted_list_def
subgoal
proof (induction rule: antisym[consumes 0, case_names 1 2])
case 1
then show ?case
apply (rule le_ASSERTI)
apply (rule ASSERT_leI, simp)
apply (rule intro_spec_refine[where R=Id, simplified]; clarsimp)
apply (rule extend_list_of_filtered_set[OF FIN _ sym], assumption, assumption)
subgoal for xs' xs
apply (rule rhs_step_bind_SPEC[where R=Id and x'="xs", simplified])
applyS simp
applyS (simp add: nfoldli_filter_deforestation)
done
done
next
case 2
then show ?case
apply (rule le_ASSERTI)
apply (rule ASSERT_leI, (simp; fail))
apply (rule intro_spec_refine[where R=Id, simplified]; clarsimp)
subgoal for xs
apply (rule rhs_step_bind_SPEC[where R=Id and x'="filter P xs", simplified])
apply simp
apply (simp add: nfoldli_filter_deforestation)
done
done
qed
done
lemma FOREACHc_filter_deforestation2:
assumes [simp]: "distinct xs"
shows "(FOREACHc (set (filter P xs)) c f s)
= FOREACHc (set xs) c (λx s. if P x then f x s else RETURN s) s"
using FOREACHc_filter_deforestation[of "set xs", simplified, folded set_filter]
.
subsection ‹For Loops›
partial_function (heap) imp_for :: "nat ⇒ nat ⇒ ('a ⇒ bool Heap) ⇒ (nat ⇒ 'a ⇒ 'a Heap) ⇒ 'a ⇒ 'a Heap" where
"imp_for i u c f s = (if i ≥ u then return s else do {ctn <- c s; if ctn then f i s ⤜ imp_for (i + 1) u c f else return s})"
declare imp_for.simps[code]
lemma [simp]:
"i ≥ u ⟹ imp_for i u c f s = return s"
"i < u ⟹ imp_for i u c f s = do {ctn <- c s; if ctn then f i s ⤜ imp_for (i + 1) u c f else return s}"
by (auto simp: imp_for.simps)
lemma imp_nfoldli_deforest[sepref_opt_simps]:
"imp_nfoldli [l..<u] c = imp_for l u c"
apply (intro ext)
subgoal for f s
apply (induction "u - l" arbitrary: l u s)
apply (simp add: upt_conv_Cons; fail)
apply (simp add: upt_conv_Cons)
apply (fo_rule arg_cong)
by (auto cong: if_cong)
done
partial_function (heap) imp_for' :: "nat ⇒ nat ⇒ (nat ⇒ 'a ⇒ 'a Heap) ⇒ 'a ⇒ 'a Heap" where
"imp_for' i u f s = (if i ≥ u then return s else f i s ⤜ imp_for' (i + 1) u f)"
declare imp_for'.simps[code]
lemma [simp]:
"i ≥ u ⟹ imp_for' i u f s = return s"
"i < u ⟹ imp_for' i u f s = f i s ⤜ imp_for' (i + 1) u f"
by (auto simp: imp_for'.simps)
lemma imp_for_imp_for'[sepref_opt_simps]:
"imp_for i u (λ _. return True) = imp_for' i u"
apply (intro ext)
subgoal for f s
apply (induction "u - i" arbitrary: i u s)
apply (simp; fail)
apply simp
apply (fo_rule arg_cong)
by auto
done
partial_function (heap) imp_for_down :: "nat ⇒ nat ⇒ ('a ⇒ bool Heap) ⇒ (nat ⇒ 'a ⇒ 'a Heap) ⇒ 'a ⇒ 'a Heap" where
"imp_for_down l i c f s = do {
let i = i - 1;
ctn ← c s;
if ctn then do {
s ← f i s;
if i>l then imp_for_down l i c f s else return s
} else return s
}"
declare imp_for_down.simps[code]
lemma imp_nfoldli_deforest_down[sepref_opt_simps]:
"imp_nfoldli (rev [l..<u]) c =
(λf s. if u≤l then return s else imp_for_down l u c f s)"
proof (intro ext)
fix f s
show "imp_nfoldli (rev [l..<u]) c f s =
(if l ≥ u then return s else imp_for_down l u c f s)"
proof cases
assume "l≥u" thus ?thesis by auto
next
assume "¬(l≥u)" hence "l<u" by auto
thus ?thesis
apply simp
proof (induction "u - l" arbitrary: u s)
case 0 thus ?case by auto
next
case (Suc u')
from Suc.prems Suc.hyps(2) have [simp]: "rev [l..<u] = (u-1)#rev [l..<u-1]"
apply simp
apply (subst upt_Suc_append[symmetric])
apply auto
done
show ?case using Suc.hyps(1)[of "u-1"] Suc.hyps(2) Suc.prems
apply (subst imp_for_down.simps)
apply (cases "l < u - Suc 0")
apply (auto simp: Let_def cong: if_cong)
done
qed
qed
qed
context begin
private fun imp_for_down_induction_scheme :: "nat ⇒ nat ⇒ unit" where
"imp_for_down_induction_scheme l i = (
let i=i-1 in
if i>l then
imp_for_down_induction_scheme l i
else ()
)"
partial_function (heap) imp_for_down' :: "nat ⇒ nat ⇒ (nat ⇒ 'a ⇒ 'a Heap) ⇒ 'a ⇒ 'a Heap" where
"imp_for_down' l i f s = do {
let i = i - 1;
s ← f i s;
if i>l then imp_for_down' l i f s else return s
}"
declare imp_for_down'.simps[code]
lemma imp_for_down_no_cond[sepref_opt_simps]:
"imp_for_down l u (λ_. return True) = imp_for_down' l u"
apply (induction l u rule: imp_for_down_induction_scheme.induct)
apply (intro ext)
apply (subst imp_for_down.simps)
apply (subst imp_for_down'.simps)
apply (simp cong: if_cong)
done
end
lemma imp_for'_rule:
assumes LESS: "l≤u"
assumes PRE: "P ⟹⇩A I l s"
assumes STEP: "⋀i s. ⟦ l≤i; i<u ⟧ ⟹ <I i s> f i s <I (i+1)>"
shows "<P> imp_for' l u f s <I u>"
apply (rule Hoare_Triple.cons_pre_rule[OF PRE])
using LESS
proof (induction arbitrary: s rule: inc_induct)
case base thus ?case by sep_auto
next
case (step k)
show ?case using step.hyps
by (sep_auto heap: STEP step.IH)
qed
text ‹This lemma is used to manually convert a fold to a loop over indices. ›
lemma fold_idx_conv: "fold f l s = fold (λi. f (l!i)) [0..<length l] s"
proof (induction l arbitrary: s rule: rev_induct)
case Nil thus ?case by simp
next
case (snoc x l)
{ fix x s
have "fold (λa. f ((l @ [x]) ! a)) [0..<length l] s = fold (λa. f (l ! a)) [0..<length l] s"
by (rule fold_cong) (simp_all add: nth_append)
}
with snoc show ?case by simp
qed
end
Theory Sepref_Improper
section ‹Ad-Hoc Solutions›
theory Sepref_Improper
imports
Sepref_Tool
Sepref_HOL_Bindings
Sepref_Foreach
Sepref_Intf_Util
begin
text ‹This theory provides some ad-hoc solutions to practical problems,
that, however, still need a more robust/clean solution›
subsection ‹Pure Higher-Order Functions›
text ‹Ad-Hoc way to support pure higher-order arguments›
definition pho_apply :: "('a ⇒ 'b) ⇒ 'a ⇒ 'b" where [code_unfold,simp]: "pho_apply f x = f x"
sepref_register pho_apply
lemmas fold_pho_apply = pho_apply_def[symmetric]
lemma pure_fun_refine[sepref_fr_rules]: "hn_refine
(hn_val (A→B) f fi * hn_val A x xi)
(return (pho_apply$fi$xi))
(hn_val (A→B) f fi * hn_val A x xi)
(pure B)
(RETURN$(pho_apply$f$x))"
by (sep_auto intro!: hn_refineI simp: pure_def hn_ctxt_def dest: fun_relD)
end
Theory Sepref_Chapter_IICF
chapter ‹The Imperative Isabelle Collection Framework›
text ‹The Imperative Isabelle Collection Framework provides
efficient imperative implementations of collection data structures.
›
theory Sepref_Chapter_IICF
imports Main
begin
end
Theory IICF_Set
section ‹Set Interface›
theory IICF_Set
imports "../../Sepref"
begin
subsection ‹Operations›
definition [simp]: "op_set_is_empty s ≡ s={}"
lemma op_set_is_empty_param[param]: "(op_set_is_empty,op_set_is_empty)∈⟨A⟩set_rel → bool_rel" by auto
context
notes [simp] = IS_LEFT_UNIQUE_def
begin
sepref_decl_op set_empty: "{}" :: "⟨A⟩set_rel" .
sepref_decl_op (no_def) set_is_empty: op_set_is_empty :: "⟨A⟩set_rel → bool_rel" .
sepref_decl_op set_member: "(∈)" :: "A → ⟨A⟩set_rel → bool_rel" where "IS_LEFT_UNIQUE A" "IS_RIGHT_UNIQUE A" .
sepref_decl_op set_insert: Set.insert :: "A → ⟨A⟩set_rel → ⟨A⟩set_rel" where "IS_RIGHT_UNIQUE A" .
sepref_decl_op set_delete: "λx s. s - {x}" :: "A → ⟨A⟩set_rel → ⟨A⟩set_rel"
where "IS_LEFT_UNIQUE A" "IS_RIGHT_UNIQUE A" .
sepref_decl_op set_union: "(∪)" :: "⟨A⟩set_rel → ⟨A⟩set_rel → ⟨A⟩set_rel" .
sepref_decl_op set_inter: "(∩)" :: "⟨A⟩set_rel → ⟨A⟩set_rel → ⟨A⟩set_rel" where "IS_LEFT_UNIQUE A" "IS_RIGHT_UNIQUE A" .
sepref_decl_op set_diff: "(-) ::_ set ⇒ _" :: "⟨A⟩set_rel → ⟨A⟩set_rel → ⟨A⟩set_rel" where "IS_LEFT_UNIQUE A" "IS_RIGHT_UNIQUE A" .
sepref_decl_op set_subseteq: "(⊆)" :: "⟨A⟩set_rel → ⟨A⟩set_rel → bool_rel" where "IS_LEFT_UNIQUE A" "IS_RIGHT_UNIQUE A" .
sepref_decl_op set_subset: "(⊂)" :: "⟨A⟩set_rel → ⟨A⟩set_rel → bool_rel" where "IS_LEFT_UNIQUE A" "IS_RIGHT_UNIQUE A" .
sepref_decl_op set_pick: "RES" :: "[λs. s≠{}]⇩f ⟨K⟩set_rel → K" by auto
end
subsection ‹Patterns›
lemma pat_set[def_pat_rules]:
"{} ≡ op_set_empty"
"(∈) ≡ op_set_member"
"Set.insert ≡ op_set_insert"
"(∪) ≡ op_set_union"
"(∩) ≡ op_set_inter"
"(-) ≡ op_set_diff"
"(⊆) ≡ op_set_subseteq"
"(⊂) ≡ op_set_subset"
by (auto intro!: eq_reflection)
lemma pat_set2[pat_rules]:
"(=) $s${} ≡ op_set_is_empty$s"
"(=) ${}$s ≡ op_set_is_empty$s"
"(-) $s$(Set.insert$x${}) ≡ op_set_delete$x$s"
"SPEC$(λ⇩2x. (∈) $x$s) ≡ op_set_pick s"
"RES$s ≡ op_set_pick s"
by (auto intro!: eq_reflection)
locale set_custom_empty =
fixes empty and op_custom_empty :: "'a set"
assumes op_custom_empty_def: "op_custom_empty = op_set_empty"
begin
sepref_register op_custom_empty :: "'ax set"
lemma fold_custom_empty:
"{} = op_custom_empty"
"op_set_empty = op_custom_empty"
"mop_set_empty = RETURN op_custom_empty"
unfolding op_custom_empty_def by simp_all
end
end
Theory IICF_List_SetO
section ‹Sets by Lists that Own their Elements›
theory IICF_List_SetO
imports "../Intf/IICF_Set"
begin
text ‹Minimal implementation, only supporting a few operations›
definition "lso_assn A ≡ hr_comp (list_assn A) (br set (λ_. True))"
lemmas [fcomp_norm_unfold] = lso_assn_def[symmetric]
lemma lso_is_pure[safe_constraint_rules]: "is_pure A ⟹ is_pure (lso_assn A)"
unfolding lso_assn_def by safe_constraint
lemma lso_empty_aref: "(uncurry0 (RETURN []), uncurry0 (RETURN op_set_empty))
∈ unit_rel →⇩f ⟨br set (λ_. True)⟩nres_rel"
by (auto simp: in_br_conv intro!: frefI nres_relI)
lemma lso_ins_aref: "(uncurry (RETURN oo ((#) )), uncurry (RETURN oo op_set_insert))
∈ Id ×⇩r br set (λ_. True) →⇩f ⟨br set (λ_. True)⟩nres_rel"
by (auto simp: in_br_conv intro!: frefI nres_relI)
sepref_decl_impl (no_register) lso_empty: hn_Nil[to_hfref] uses lso_empty_aref .
definition [simp]: "op_lso_empty ≡ op_set_empty"
lemma lso_fold_custom_empty:
"{} = op_lso_empty"
"op_set_empty = op_lso_empty"
by auto
lemmas [sepref_fr_rules] = lso_empty_hnr[folded op_lso_empty_def]
sepref_decl_impl lso_insert: hn_Cons[to_hfref] uses lso_ins_aref .
thm hn_Cons[FCOMP lso_ins_aref]
definition [simp]: "op_lso_bex P S ≡ ∃x∈S. P x"
lemma fold_lso_bex: "Bex ≡ λs P. op_lso_bex P s" by auto
definition [simp]: "mop_lso_bex P S ≡ ASSERT (∀x∈S. ∃y. P x = RETURN y) ⪢ RETURN (∃x∈S. P x = RETURN True)"
lemma op_mop_lso_bex: "RETURN (op_lso_bex P S) = mop_lso_bex (RETURN o P) S" by simp
sepref_register op_lso_bex
lemma lso_bex_arity[sepref_monadify_arity]:
"op_lso_bex ≡ λ⇩2P s. SP op_lso_bex$(λ⇩2x. P$x)$s" by (auto intro!: eq_reflection ext)
lemma op_lso_bex_monadify[sepref_monadify_comb]:
"EVAL$(op_lso_bex$(λ⇩2x. P x)$s) ≡ (⤜) $(EVAL$s)$(λ⇩2s. mop_lso_bex$(λ⇩2x. EVAL $ P x)$s)" by simp
definition "lso_abex P l ≡ nfoldli l (Not) (λx _. P x) False"
lemma lso_abex_to_set: "lso_abex P l ≤ mop_lso_bex P (set l)"
proof -
{ fix b
have "nfoldli l (Not) (λx _. P x) b ≤ ASSERT (∀x∈set l. ∃y. P x = RETURN y) ⪢ RETURN ((∃x∈set l. P x = RETURN True) ∨ b)"
apply (induction l arbitrary: b)
applyS simp
applyS (clarsimp simp add: pw_le_iff refine_pw_simps; blast)
done
} from this[of False] show ?thesis by (simp add: lso_abex_def)
qed
locale lso_bex_impl_loc =
fixes Pi and P :: "'a ⇒ bool nres"
fixes li :: "'c list" and l :: "'a list"
fixes A :: "'a ⇒ 'c ⇒ assn"
fixes F :: assn
assumes Prl: "⋀x xi. ⟦x∈set l⟧ ⟹ hn_refine (F * hn_ctxt A x xi) (Pi xi) (F * hn_ctxt A x xi) bool_assn (P x)"
begin
sepref_register l
sepref_register P
lemma [sepref_comb_rules]:
assumes "Γ ⟹⇩t F' * F * hn_ctxt A x xi"
assumes "x∈set l"
shows "hn_refine Γ (Pi xi) (F' * F * hn_ctxt A x xi) bool_assn (P$x)"
using hn_refine_frame[OF Prl[OF assms(2)], of Γ F'] assms(1)
by (simp add: assn_assoc)
schematic_goal lso_bex_impl:
"hn_refine (hn_ctxt (list_assn A) l li * F) (?c) (F * hn_ctxt (list_assn A) l li) bool_assn (lso_abex P l)"
unfolding lso_abex_def[abs_def]
by sepref
end
concrete_definition lso_bex_impl uses lso_bex_impl_loc.lso_bex_impl
lemma hn_lso_bex[sepref_prep_comb_rule,sepref_comb_rules]:
assumes FR: "Γ ⟹⇩t hn_ctxt (lso_assn A) s li * F"
assumes Prl: "⋀x xi. ⟦x∈s⟧ ⟹ hn_refine (F * hn_ctxt A x xi) (Pi xi) (F * hn_ctxt A x xi) bool_assn (P x)"
notes [simp del] = mop_lso_bex_def
shows "hn_refine Γ (lso_bex_impl Pi li) (F * hn_ctxt (lso_assn A) s li) bool_assn (mop_lso_bex$(λ⇩2x. P x)$s)"
apply (rule hn_refine_cons_pre[OF FR])
apply (clarsimp simp: hn_ctxt_def lso_assn_def hr_comp_def in_br_conv hnr_pre_ex_conv)
apply (rule hn_refine_preI)
apply (drule mod_starD; clarsimp)
apply (rule hn_refine_ref[OF lso_abex_to_set])
proof -
fix l assume [simp]: "s=set l"
from Prl have Prl': "⋀x xi. ⟦x∈set l⟧ ⟹ hn_refine (F * hn_ctxt A x xi) (Pi xi) (F * hn_ctxt A x xi) bool_assn (P x)"
by simp
show "hn_refine (list_assn A l li * F) (lso_bex_impl Pi li) (∃⇩Aba. F * list_assn A ba li * ↑ (set l = set ba)) bool_assn
(lso_abex P l)"
apply (rule hn_refine_cons[OF _ lso_bex_impl.refine])
applyS (simp add: hn_ctxt_def; rule entt_refl)
apply1 unfold_locales apply1 (rule Prl') applyS simp
applyS (sep_auto intro!: enttI simp: hn_ctxt_def)
applyS (rule entt_refl)
done
qed
end
Theory IICF_Multiset
section ‹Multiset Interface›
theory IICF_Multiset
imports "../../Sepref"
begin
subsection ‹Additions to Multiset Theory›
lemma rel_mset_Plus_gen:
assumes "rel_mset A m m'"
assumes "rel_mset A n n'"
shows "rel_mset A (m+n) (m'+n')"
using assms
by induction (auto simp: algebra_simps dest: rel_mset_Plus)
lemma rel_mset_single:
assumes "A x y"
shows "rel_mset A {#x#} {#y#}"
unfolding rel_mset_def
apply (rule exI[where x="[x]"])
apply (rule exI[where x="[y]"])
using assms by auto
lemma rel_mset_Minus:
assumes BIU: "bi_unique A"
shows "⟦ rel_mset A m n; A x y ⟧ ⟹ rel_mset A (m-{#x#}) (n-{#y#})"
unfolding rel_mset_def
proof clarsimp
fix ml nl
assume A: "A x y"
assume R: "list_all2 A ml nl"
show "∃ml'. mset ml' = mset ml - {#x#} ∧
(∃nl'. mset nl' = mset nl - {#y#} ∧ list_all2 A ml' nl')"
proof (cases "x∈set ml")
case False
have "y ∉ set nl" using A R
apply (auto simp: in_set_conv_decomp list_all2_append2 list_all2_Cons2)
using False BIU[unfolded bi_unique_alt_def]
apply (auto dest: left_uniqueD)
done
with False R show ?thesis by (auto simp: diff_single_trivial in_multiset_in_set)
next
case True
then obtain ml1 ml2 where [simp]: "ml=ml1@x#ml2" by (auto simp: in_set_conv_decomp)
then obtain nl1 nl2 where [simp]: "nl=nl1@y#nl2"
and LA: "list_all2 A ml1 nl1" "list_all2 A ml2 nl2"
using A R
apply (auto simp: in_set_conv_decomp list_all2_append1 list_all2_Cons1)
using BIU[unfolded bi_unique_alt_def]
apply (auto dest: right_uniqueD)
done
have
"mset (ml1@ml2) = mset ml - {#x#}"
"mset (nl1@nl2) = mset nl - {#y#}"
using R
by (auto simp: algebra_simps add_implies_diff union_assoc)
moreover have "list_all2 A (ml1@ml2) (nl1@nl2)"
by (rule list_all2_appendI) fact+
ultimately show ?thesis by blast
qed
qed
lemma rel_mset_Minus_gen:
assumes BIU: "bi_unique A"
assumes "rel_mset A m m'"
assumes "rel_mset A n n'"
shows "rel_mset A (m-n) (m'-n')"
using assms(3,2)
apply (induction R≡A _ _ rule: rel_mset_induct)
apply (auto dest: rel_mset_Minus[OF BIU] simp: algebra_simps)
done
lemma pcr_count:
assumes "bi_unique A"
shows "rel_fun (rel_mset A) (rel_fun A (=)) count count"
apply (intro rel_funI)
unfolding rel_mset_def
apply clarsimp
subgoal for x y xs ys
apply (rotate_tac,induction xs ys rule: list_all2_induct)
using assms
by (auto simp: bi_unique_alt_def left_uniqueD right_uniqueD)
done
subsection ‹Parametricity Setup›
definition [to_relAPP]: "mset_rel A ≡ p2rel (rel_mset (rel2p A))"
lemma rel2p_mset[rel2p]: "rel2p (⟨A⟩mset_rel) = rel_mset (rel2p A)"
by (simp add: mset_rel_def)
lemma p2re_mset[p2rel]: "p2rel (rel_mset A) = ⟨p2rel A⟩mset_rel"
by (simp add: mset_rel_def)
lemma mset_rel_empty[simp]:
"(a,{#})∈⟨A⟩mset_rel ⟷ a={#}"
"({#},b)∈⟨A⟩mset_rel ⟷ b={#}"
by (auto simp: mset_rel_def p2rel_def rel_mset_def)
lemma param_mset_empty[param]: "({#},{#}) ∈ ⟨A⟩mset_rel"
unfolding mset_rel_def
apply (simp add: p2rel_def)
by (rule rel_mset_Zero)
lemma param_mset_Plus[param]: "((+),(+))∈⟨A⟩mset_rel → ⟨A⟩mset_rel → ⟨A⟩mset_rel"
apply (rule rel2pD)
apply (simp add: rel2p)
apply (intro rel_funI)
by (rule rel_mset_Plus_gen)
lemma param_mset_add[param]: "(add_mset, add_mset) ∈ A → ⟨A⟩mset_rel → ⟨A⟩mset_rel"
apply (rule rel2pD)
apply (simp add: rel2p)
apply (intro rel_funI)
by (rule rel_mset_Plus)
lemma param_mset_minus[param]: "⟦single_valued A; single_valued (A¯)⟧
⟹ ((-), (-)) ∈ ⟨A⟩mset_rel → ⟨A⟩mset_rel → ⟨A⟩mset_rel"
apply (rule rel2pD)
apply (simp add: rel2p)
apply (intro rel_funI)
apply (rule rel_mset_Minus_gen)
subgoal apply (unfold IS_LEFT_UNIQUE_def[symmetric])
by (simp add: prop2p bi_unique_alt_def)
apply (simp; fail)
apply (simp; fail)
done
lemma param_count[param]: "⟦single_valued A; single_valued (A¯)⟧ ⟹ (count,count)∈⟨A⟩mset_rel → A → nat_rel"
apply (rule rel2pD)
apply (simp add: prop2p rel2p)
apply (rule pcr_count)
apply (simp add: bi_unique_alt_def)
done
lemma param_set_mset[param]:
shows "(set_mset, set_mset) ∈ ⟨A⟩mset_rel → ⟨A⟩set_rel"
apply (rule rel2pD; simp add: rel2p)
by (rule multiset.set_transfer)
definition [simp]: "mset_is_empty m ≡ m = {#}"
lemma mset_is_empty_param[param]: "(mset_is_empty,mset_is_empty) ∈ ⟨A⟩mset_rel → bool_rel"
unfolding mset_rel_def mset_is_empty_def[abs_def]
by (auto simp: p2rel_def rel_mset_def intro: nres_relI)
subsection ‹Operations›
sepref_decl_op mset_empty: "{#}" :: "⟨A⟩mset_rel" .
sepref_decl_op mset_is_empty: "λm. m={#}" :: "⟨A⟩mset_rel → bool_rel"
unfolding mset_is_empty_def[symmetric]
apply (rule frefI)
by parametricity
sepref_decl_op mset_insert: "add_mset" :: "A → ⟨A⟩mset_rel → ⟨A⟩mset_rel" .
sepref_decl_op mset_delete: "λx m. m - {#x#}" :: "A → ⟨A⟩mset_rel → ⟨A⟩mset_rel"
where "single_valued A" "single_valued (A¯)" .
sepref_decl_op mset_plus: "(+)::_ multiset ⇒ _" :: "⟨A⟩mset_rel → ⟨A⟩mset_rel → ⟨A⟩mset_rel" .
sepref_decl_op mset_minus: "(-)::_ multiset ⇒ _" :: "⟨A⟩mset_rel → ⟨A⟩mset_rel → ⟨A⟩mset_rel"
where "single_valued A" "single_valued (A¯)" .
sepref_decl_op mset_contains: "(∈#)" :: "A → ⟨A⟩mset_rel → bool_rel"
where "single_valued A" "single_valued (A¯)" .
sepref_decl_op mset_count: "λx y. count y x" :: "A → ⟨A⟩mset_rel → nat_rel"
where "single_valued A" "single_valued (A¯)" .
sepref_decl_op mset_pick: "λm. SPEC (λ(x,m'). m = {#x#} + m')" ::
"[λm. m≠{#}]⇩f ⟨A⟩mset_rel → A ×⇩r ⟨A⟩mset_rel"
unfolding mset_is_empty_def[symmetric]
apply (intro frefI nres_relI)
apply (refine_vcg SPEC_refine)
apply1 (rule ccontr; clarsimp)
applyS (metis msed_rel_invL rel2p_def rel2p_mset union_ac(2))
applyS parametricity
done
subsection ‹Patterns›
lemma [def_pat_rules]:
"{#} ≡ op_mset_empty"
"add_mset ≡ op_mset_insert"
"(=) $b${#} ≡ op_mset_is_empty$b"
"(=) ${#}$b ≡ op_mset_is_empty$b"
"(+) $a$b ≡ op_mset_plus$a$b"
"(-) $a$b ≡ op_mset_minus$a$b"
by (auto intro!: eq_reflection simp: algebra_simps)
lemma [def_pat_rules]:
"(+) $b$(add_mset$x${#}) ≡ op_mset_insert$x$b"
"(+) $(add_mset$x${#})$b ≡ op_mset_insert$x$b"
"(-) $b$(add_mset$x${#}) ≡ op_mset_delete$x$b"
"(<) $0$(count$a$x) ≡ op_mset_contains$x$a"
"(∈) $x$(set_mset$a) ≡ op_mset_contains$x$a"
by (auto intro!: eq_reflection simp: algebra_simps)
locale mset_custom_empty =
fixes rel empty and op_custom_empty :: "'a multiset"
assumes customize_hnr_aux: "(uncurry0 empty,uncurry0 (RETURN (op_mset_empty::'a multiset))) ∈ unit_assn⇧k →⇩a rel"
assumes op_custom_empty_def: "op_custom_empty = op_mset_empty"
begin
sepref_register op_custom_empty :: "'ax multiset"
lemma fold_custom_empty:
"{#} = op_custom_empty"
"op_mset_empty = op_custom_empty"
"mop_mset_empty = RETURN op_custom_empty"
unfolding op_custom_empty_def by simp_all
lemmas custom_hnr[sepref_fr_rules] = customize_hnr_aux[folded op_custom_empty_def]
end
end
Theory IICF_Prio_Bag
section ‹Priority Bag Interface›
theory IICF_Prio_Bag
imports IICF_Multiset
begin
subsection ‹Operations›
text ‹We prove quite general parametricity lemmas, but restrict
them to relations below identity when we register the operations.
This restriction, although not strictly necessary, makes usage of the tool
much simpler, as we do not need to handle different prio-functions for
abstract and concrete types.
›
context
fixes prio:: "'a ⇒ 'b::linorder"
begin
definition "mop_prio_pop_min b = ASSERT (b≠{#}) ⪢ SPEC (λ(v,b').
v ∈# b
∧ b'=b - {#v#}
∧ (∀v'∈set_mset b. prio v ≤ prio v'))"
definition "mop_prio_peek_min b ≡ ASSERT (b≠{#}) ⪢ SPEC (λv.
v ∈# b
∧ (∀v'∈set_mset b. prio v ≤ prio v'))"
end
lemma param_mop_prio_pop_min[param]:
assumes [param]: "(prio',prio) ∈ A → B"
assumes [param]: "((≤),(≤)) ∈ B → B → bool_rel"
shows "(mop_prio_pop_min prio',mop_prio_pop_min prio) ∈ ⟨A⟩mset_rel → ⟨A ×⇩r ⟨A⟩mset_rel⟩nres_rel"
unfolding mop_prio_pop_min_def[abs_def]
apply (clarsimp simp: mop_prio_pop_min_def nres_rel_def pw_le_iff refine_pw_simps)
apply (safe; simp)
proof goal_cases
case (1 m n x)
assume "(m,n)∈⟨A⟩mset_rel"
and "x∈#m"
and P': "∀x'∈set_mset m. prio' x ≤ prio' x'"
hence R: "rel_mset (rel2p A) m n" by (simp add: mset_rel_def p2rel_def)
from multi_member_split[OF ‹x∈#m›] obtain m' where [simp]: "m=m'+{#x#}" by auto
from msed_rel_invL[OF R[simplified]] obtain n' y where
[simp]: "n=n'+{#y#}" and [param, simp]: "(x,y)∈A" and R': "(m',n')∈⟨A⟩mset_rel"
by (auto simp: rel2p_def mset_rel_def p2rel_def)
have "∀y'∈set_mset n. prio y ≤ prio y'"
proof
fix y' assume "y'∈set_mset n"
then obtain x' where [param]: "(x',y')∈A" and "x'∈set_mset m"
using R
by (metis insert_DiffM msed_rel_invR rel2pD union_single_eq_member)
with P' have "prio' x ≤ prio' x'" by blast
moreover have "(prio' x ≤ prio' x', prio y ≤ prio y') ∈ bool_rel"
by parametricity
ultimately show "prio y ≤ prio y'" by simp
qed
thus
"∃a. (x, a) ∈ A ∧ (m - {#x#}, n - {#a#}) ∈ ⟨A⟩mset_rel ∧ a ∈# n ∧ (∀v'∈set_mset n. prio a ≤ prio v')"
using R' by (auto intro!: exI[where x=n'] exI[where x=y])
qed
lemma param_mop_prio_peek_min[param]:
assumes [param]: "(prio',prio) ∈ A → B"
assumes [param]: "((≤),(≤)) ∈ B → B → bool_rel"
shows "(mop_prio_peek_min prio',mop_prio_peek_min prio) ∈ ⟨A⟩mset_rel → ⟨A⟩nres_rel"
unfolding mop_prio_peek_min_def[abs_def]
apply (clarsimp
simp: mop_prio_pop_min_def nres_rel_def pw_le_iff refine_pw_simps
)
apply (safe; simp?)
proof -
fix m n x
assume "(m,n)∈⟨A⟩mset_rel"
and "x∈#m"
and P': "∀x'∈set_mset m. prio' x ≤ prio' x'"
hence R: "rel_mset (rel2p A) m n" by (simp add: mset_rel_def p2rel_def)
from multi_member_split[OF ‹x∈#m›] obtain m' where [simp]: "m=m'+{#x#}" by auto
from msed_rel_invL[OF R[simplified]] obtain n' y where
[simp]: "n=n'+{#y#}" and [param, simp]: "(x,y)∈A" and R': "(m',n')∈⟨A⟩mset_rel"
by (auto simp: rel2p_def mset_rel_def p2rel_def)
have "∀y'∈set_mset n. prio y ≤ prio y'"
proof
fix y' assume "y'∈set_mset n"
then obtain x' where [param]: "(x',y')∈A" and "x'∈set_mset m"
using R
by (metis msed_rel_invR mset_contains_eq rel2pD union_mset_add_mset_left union_single_eq_member)
with P' have "prio' x ≤ prio' x'" by blast
moreover have "(prio' x ≤ prio' x', prio y ≤ prio y') ∈ bool_rel"
by parametricity
ultimately show "prio y ≤ prio y'" by simp
qed
thus "∃y. (x, y) ∈ A ∧ y ∈# n ∧ (∀v'∈set_mset n. prio y ≤ prio v')"
using R' by (auto intro!: exI[where x=y])
qed
context fixes prio :: "'a ⇒ 'b::linorder" and A :: "('a×'a) set" begin
sepref_decl_op (no_def,no_mop) prio_pop_min:
"PR_CONST (mop_prio_pop_min prio)" :: "⟨A⟩mset_rel →⇩f ⟨A ×⇩r ⟨A⟩mset_rel⟩nres_rel"
where "IS_BELOW_ID A"
proof goal_cases
case 1
hence [param]: "(prio,prio)∈A → Id"
by (auto simp: IS_BELOW_ID_def)
show ?case
apply (rule fref_ncI)
apply parametricity
by auto
qed
sepref_decl_op (no_def,no_mop) prio_peek_min:
"PR_CONST (mop_prio_peek_min prio)" :: "⟨A⟩mset_rel →⇩f ⟨A⟩nres_rel"
where "IS_BELOW_ID A"
proof goal_cases
case 1
hence [param]: "(prio,prio)∈A → Id"
by (auto simp: IS_BELOW_ID_def)
show ?case
apply (rule fref_ncI)
apply parametricity
by auto
qed
end
subsection ‹Patterns›
lemma [def_pat_rules]:
"mop_prio_pop_min$prio ≡ UNPROTECT (mop_prio_pop_min prio)"
"mop_prio_peek_min$prio ≡ UNPROTECT (mop_prio_peek_min prio)"
by auto
end
Theory IICF_List_Mset
section ‹Multisets by Lists›
theory IICF_List_Mset
imports "../Intf/IICF_Multiset"
begin
subsection ‹Abstract Operations›
definition "list_mset_rel ≡ br mset (λ_. True)"
lemma lms_empty_aref: "([],op_mset_empty) ∈ list_mset_rel"
unfolding list_mset_rel_def by (auto simp: in_br_conv)
lemma lms_is_empty_aref: "(is_Nil,op_mset_is_empty) ∈ list_mset_rel → bool_rel"
unfolding list_mset_rel_def by (auto simp: in_br_conv split: list.splits)
lemma lms_insert_aref: "((#), op_mset_insert) ∈ Id → list_mset_rel → list_mset_rel"
unfolding list_mset_rel_def by (auto simp: in_br_conv)
lemma lms_union_aref: "((@), op_mset_plus) ∈ list_mset_rel → list_mset_rel → list_mset_rel"
unfolding list_mset_rel_def by (auto simp: in_br_conv)
lemma lms_pick_aref: "(λx#l ⇒ RETURN (x,l), mop_mset_pick) ∈ list_mset_rel → ⟨Id ×⇩r list_mset_rel⟩nres_rel"
unfolding list_mset_rel_def mop_mset_pick_alt[abs_def]
apply1 (refine_vcg nres_relI fun_relI)
apply1 (clarsimp simp: in_br_conv neq_Nil_conv)
apply1 (refine_vcg RETURN_SPEC_refine)
applyS (clarsimp simp: in_br_conv algebra_simps)
done
definition "list_contains x l ≡ list_ex ((=) x) l"
lemma lms_contains_aref: "(list_contains, op_mset_contains) ∈ Id → list_mset_rel → bool_rel"
unfolding list_mset_rel_def list_contains_def[abs_def]
by (auto simp: in_br_conv list_ex_iff in_multiset_in_set)
fun list_remove1 :: "'a ⇒ 'a list ⇒ 'a list" where
"list_remove1 x [] = []"
| "list_remove1 x (y#ys) = (if x=y then ys else y#list_remove1 x ys)"
lemma mset_list_remove1[simp]: "mset (list_remove1 x l) = mset l - {#x#}"
apply (induction l)
applyS simp
by (clarsimp simp: algebra_simps)
lemma lms_remove_aref: "(list_remove1, op_mset_delete) ∈ Id → list_mset_rel → list_mset_rel"
unfolding list_mset_rel_def by (auto simp: in_br_conv)
fun list_count :: "'a ⇒ 'a list ⇒ nat" where
"list_count _ [] = 0"
| "list_count x (y#ys) = (if x=y then 1 + list_count x ys else list_count x ys)"
lemma mset_list_count[simp]: "list_count x ys = count (mset ys) x"
by (induction ys) auto
lemma lms_count_aref: "(list_count, op_mset_count) ∈ Id → list_mset_rel → nat_rel"
unfolding list_mset_rel_def by (auto simp: in_br_conv)
definition list_remove_all :: "'a list ⇒ 'a list ⇒ 'a list" where
"list_remove_all xs ys ≡ fold list_remove1 ys xs"
lemma list_remove_all_mset[simp]: "mset (list_remove_all xs ys) = mset xs - mset ys"
unfolding list_remove_all_def
by (induction ys arbitrary: xs) (auto simp: algebra_simps)
lemma lms_minus_aref: "(list_remove_all,op_mset_minus) ∈ list_mset_rel → list_mset_rel → list_mset_rel"
unfolding list_mset_rel_def by (auto simp: in_br_conv)
subsection ‹Declaration of Implementations›
definition "list_mset_assn A ≡ pure (list_mset_rel O ⟨the_pure A⟩mset_rel)"
declare list_mset_assn_def[symmetric,fcomp_norm_unfold]
lemma [safe_constraint_rules]: "is_pure (list_mset_assn A)" unfolding list_mset_assn_def by simp
sepref_decl_impl (no_register) lms_empty: lms_empty_aref[sepref_param] .
definition [simp]: "op_list_mset_empty ≡ op_mset_empty"
lemma lms_fold_custom_empty:
"{#} = op_list_mset_empty"
"op_mset_empty = op_list_mset_empty"
by auto
sepref_register op_list_mset_empty
lemmas [sepref_fr_rules] = lms_empty_hnr[folded op_list_mset_empty_def]
sepref_decl_impl lms_is_empty: lms_is_empty_aref[sepref_param] .
sepref_decl_impl lms_insert: lms_insert_aref[sepref_param] .
sepref_decl_impl lms_union: lms_union_aref[sepref_param] .
lemma lms_pick_aref':
"(λx#l ⇒ return (x,l), mop_mset_pick) ∈ (pure list_mset_rel)⇧k →⇩a prod_assn id_assn (pure list_mset_rel)"
apply (simp only: prod_assn_pure_conv)
apply sepref_to_hoare
apply (sep_auto simp: refine_pw_simps list_mset_rel_def in_br_conv algebra_simps eintros del: exI)
done
sepref_decl_impl (ismop) lms_pick: lms_pick_aref' .
sepref_decl_impl lms_contains: lms_contains_aref[sepref_param] .
sepref_decl_impl lms_remove: lms_remove_aref[sepref_param] .
sepref_decl_impl lms_count: lms_count_aref[sepref_param] .
sepref_decl_impl lms_minus: lms_minus_aref[sepref_param] .
end
Theory IICF_List_MsetO
theory IICF_List_MsetO
imports "../Intf/IICF_Multiset"
begin
definition "lmso_assn A ≡ hr_comp (list_assn A) (br mset (λ_. True))"
lemmas [fcomp_norm_unfold] = lmso_assn_def[symmetric]
lemma lmso_is_pure[safe_constraint_rules]: "is_pure A ⟹ is_pure (lmso_assn A)"
unfolding lmso_assn_def by safe_constraint
lemma lmso_empty_aref: "(uncurry0 (RETURN []), uncurry0 (RETURN op_mset_empty)) ∈ unit_rel →⇩f ⟨br mset (λ_. True)⟩nres_rel"
by (auto intro!: frefI nres_relI simp: in_br_conv)
lemma lmso_is_empty_aref: "(RETURN o List.null, RETURN o op_mset_is_empty) ∈ br mset (λ_. True) →⇩f ⟨bool_rel⟩nres_rel"
by (auto intro!: frefI nres_relI simp: in_br_conv List.null_def split: list.split)
lemma lmso_insert_aref: "(uncurry (RETURN oo (#) ), uncurry (RETURN oo op_mset_insert)) ∈ (Id ×⇩r br mset (λ_. True)) →⇩f ⟨br mset (λ_. True)⟩nres_rel"
by (auto intro!: frefI nres_relI simp: in_br_conv)
definition [simp]: "hd_tl l ≡ (hd l, tl l)"
lemma hd_tl_opt[sepref_opt_simps]: "hd_tl l = (case l of (x#xs) ⇒ (x,xs) | _ ⇒ CODE_ABORT (λ_. (hd l, tl l)))"
by (auto split: list.split)
lemma lmso_pick_aref: "(RETURN o hd_tl,op_mset_pick) ∈ [λm. m≠{#}]⇩f br mset (λ_. True) → ⟨Id ×⇩r br mset (λ_. True)⟩nres_rel"
by (auto intro!: frefI nres_relI simp: in_br_conv pw_le_iff refine_pw_simps neq_Nil_conv algebra_simps)
lemma hd_tl_hnr: "(return o hd_tl,RETURN o hd_tl) ∈ [λl. ¬is_Nil l]⇩a (list_assn A)⇧d → prod_assn A (list_assn A)"
apply sepref_to_hoare
subgoal for l li by (cases l; cases li; sep_auto)
done
sepref_decl_impl (no_register) lmso_empty: hn_Nil[to_hfref] uses lmso_empty_aref .
definition [simp]: "op_lmso_empty ≡ op_mset_empty"
sepref_register op_lmso_empty
lemma lmso_fold_custom_empty:
"{#} = op_lmso_empty"
"op_mset_empty = op_lmso_empty"
"mop_mset_empty = RETURN op_lmso_empty"
by auto
lemmas [sepref_fr_rules] = lmso_empty_hnr[folded op_lmso_empty_def]
lemma list_null_hnr: "(return o List.null,RETURN o List.null) ∈ (list_assn A)⇧k →⇩a bool_assn"
apply sepref_to_hoare
subgoal for l li by (cases l; cases li; sep_auto simp: List.null_def)
done
sepref_decl_impl lmso_is_empty: list_null_hnr uses lmso_is_empty_aref .
sepref_decl_impl lmso_insert: hn_Cons[to_hfref] uses lmso_insert_aref .
context notes [simp] = in_br_conv and [split] = list.splits begin
text ‹Dummy lemma, to exloit ‹sepref_decl_impl› automation without parametricity stuff.›
private lemma op_mset_pick_dummy_param: "(op_mset_pick, op_mset_pick) ∈ Id →⇩f ⟨Id⟩nres_rel"
by (auto intro!: frefI nres_relI)
sepref_decl_impl lmso_pick: hd_tl_hnr[FCOMP lmso_pick_aref] uses op_mset_pick_dummy_param by simp
end
end
Theory IICF_List
theory IICF_List
imports
"../../Sepref"
"List-Index.List_Index"
begin
lemma param_index[param]:
"⟦single_valued A; single_valued (A¯)⟧ ⟹ (index,index) ∈ ⟨A⟩list_rel → A → nat_rel"
unfolding index_def[abs_def] find_index_def
apply (subgoal_tac "(((=), (=)) ∈ A → A → bool_rel)")
apply parametricity
by (simp add: pres_eq_iff_svb)
subsection ‹Swap two elements of a list, by index›
definition "swap l i j ≡ l[i := l!j, j:=l!i]"
lemma swap_nth[simp]: "⟦i < length l; j<length l; k<length l⟧ ⟹
swap l i j!k = (
if k=i then l!j
else if k=j then l!i
else l!k
)"
unfolding swap_def
by auto
lemma swap_set[simp]: "⟦ i < length l; j<length l ⟧ ⟹ set (swap l i j) = set l"
unfolding swap_def
by auto
lemma swap_multiset[simp]: "⟦ i < length l; j<length l ⟧ ⟹ mset (swap l i j) = mset l"
unfolding swap_def
by (auto simp: mset_swap)
lemma swap_length[simp]: "length (swap l i j) = length l"
unfolding swap_def
by auto
lemma swap_same[simp]: "swap l i i = l"
unfolding swap_def by auto
lemma distinct_swap[simp]:
"⟦i<length l; j<length l⟧ ⟹ distinct (swap l i j) = distinct l"
unfolding swap_def
by auto
lemma map_swap: "⟦i<length l; j<length l⟧
⟹ map f (swap l i j) = swap (map f l) i j"
unfolding swap_def
by (auto simp add: map_update)
lemma swap_param[param]: "⟦ i<length l; j<length l; (l',l)∈⟨A⟩list_rel; (i',i)∈nat_rel; (j',j)∈nat_rel⟧
⟹ (swap l' i' j', swap l i j)∈⟨A⟩list_rel"
unfolding swap_def
by parametricity
lemma swap_param_fref: "(uncurry2 swap,uncurry2 swap) ∈
[λ((l,i),j). i<length l ∧ j<length l]⇩f (⟨A⟩list_rel ×⇩r nat_rel) ×⇩r nat_rel → ⟨A⟩list_rel"
apply rule apply clarsimp
unfolding swap_def
apply parametricity
by simp_all
lemma param_list_null[param]: "(List.null,List.null) ∈ ⟨A⟩list_rel → bool_rel"
proof -
have 1: "List.null = (λ[] ⇒ True | _ ⇒ False)"
apply (rule ext) subgoal for l by (cases l) (auto simp: List.null_def)
done
show ?thesis unfolding 1 by parametricity
qed
subsection ‹Operations›
sepref_decl_op list_empty: "[]" :: "⟨A⟩list_rel" .
context notes [simp] = eq_Nil_null begin
sepref_decl_op list_is_empty: "λl. l=[]" :: "⟨A⟩list_rel →⇩f bool_rel" .
end
sepref_decl_op list_replicate: replicate :: "nat_rel → A → ⟨A⟩list_rel" .
definition op_list_copy :: "'a list ⇒ 'a list" where [simp]: "op_list_copy l ≡ l"
sepref_decl_op (no_def) list_copy: "op_list_copy" :: "⟨A⟩list_rel → ⟨A⟩list_rel" .
sepref_decl_op list_prepend: "(#)" :: "A → ⟨A⟩list_rel → ⟨A⟩list_rel" .
sepref_decl_op list_append: "λxs x. xs@[x]" :: "⟨A⟩list_rel → A → ⟨A⟩list_rel" .
sepref_decl_op list_concat: "(@)" :: "⟨A⟩list_rel → ⟨A⟩list_rel → ⟨A⟩list_rel" .
sepref_decl_op list_length: length :: "⟨A⟩list_rel → nat_rel" .
sepref_decl_op list_get: nth :: "[λ(l,i). i<length l]⇩f ⟨A⟩list_rel ×⇩r nat_rel → A" .
sepref_decl_op list_set: list_update :: "[λ((l,i),_). i<length l]⇩f (⟨A⟩list_rel ×⇩r nat_rel) ×⇩r A → ⟨A⟩list_rel" .
context notes [simp] = eq_Nil_null begin
sepref_decl_op list_hd: hd :: "[λl. l≠[]]⇩f ⟨A⟩list_rel → A" .
sepref_decl_op list_tl: tl :: "[λl. l≠[]]⇩f ⟨A⟩list_rel → ⟨A⟩list_rel" .
sepref_decl_op list_last: last :: "[λl. l≠[]]⇩f ⟨A⟩list_rel → A" .
sepref_decl_op list_butlast: butlast :: "[λl. l≠[]]⇩f ⟨A⟩list_rel → ⟨A⟩list_rel" .
end
sepref_decl_op list_contains: "λx l. x∈set l" :: "A → ⟨A⟩list_rel → bool_rel"
where "single_valued A" "single_valued (A¯)" .
sepref_decl_op list_swap: swap :: "[λ((l,i),j). i<length l ∧ j<length l]⇩f (⟨A⟩list_rel ×⇩r nat_rel) ×⇩r nat_rel → ⟨A⟩list_rel" .
sepref_decl_op list_rotate1: rotate1 :: "⟨A⟩list_rel → ⟨A⟩list_rel" .
sepref_decl_op list_rev: rev :: "⟨A⟩list_rel → ⟨A⟩list_rel" .
sepref_decl_op list_index: index :: "⟨A⟩list_rel → A → nat_rel"
where "single_valued A" "single_valued (A¯)" .
subsection ‹Patterns›
lemma [def_pat_rules]:
"[] ≡ op_list_empty"
"(=) $l$[] ≡ op_list_is_empty$l"
"(=) $[]$l ≡ op_list_is_empty$l"
"replicate$n$v ≡ op_list_replicate$n$v"
"Cons$x$xs ≡ op_list_prepend$x$xs"
"(@) $xs$(Cons$x$[]) ≡ op_list_append$xs$x"
"(@) $xs$ys ≡ op_list_concat$xs$ys"
"op_list_concat$xs$(Cons$x$[]) ≡ op_list_append$xs$x"
"length$xs ≡ op_list_length$xs"
"nth$l$i ≡ op_list_get$l$i"
"list_update$l$i$x ≡ op_list_set$l$i$x"
"hd$l ≡ op_list_hd$l"
"hd$l ≡ op_list_hd$l"
"tl$l ≡ op_list_tl$l"
"tl$l ≡ op_list_tl$l"
"last$l ≡ op_list_last$l"
"butlast$l ≡ op_list_butlast$l"
"(∈) $x$(set$l) ≡ op_list_contains$x$l"
"swap$l$i$j ≡ op_list_swap$l$i$j"
"rotate1$l ≡ op_list_rotate1$l"
"rev$l ≡ op_list_rev$l"
"index$l$x ≡ op_list_index$l$x"
by (auto intro!: eq_reflection)
text ‹Standard preconditions are preserved by list-relation. These lemmas are used for
simplification of preconditions after composition.›
lemma list_rel_pres_neq_nil[fcomp_prenorm_simps]: "(x',x)∈⟨A⟩list_rel ⟹ x'≠[] ⟷ x≠[]" by auto
lemma list_rel_pres_length[fcomp_prenorm_simps]: "(x',x)∈⟨A⟩list_rel ⟹ length x' = length x" by (rule list_rel_imp_same_length)
locale list_custom_empty =
fixes rel empty and op_custom_empty :: "'a list"
assumes customize_hnr_aux: "(uncurry0 empty,uncurry0 (RETURN (op_list_empty::'a list))) ∈ unit_assn⇧k →⇩a rel"
assumes op_custom_empty_def: "op_custom_empty = op_list_empty"
begin
sepref_register op_custom_empty :: "'c list"
lemma fold_custom_empty:
"[] = op_custom_empty"
"op_list_empty = op_custom_empty"
"mop_list_empty = RETURN op_custom_empty"
unfolding op_custom_empty_def by simp_all
lemmas custom_hnr[sepref_fr_rules] = customize_hnr_aux[folded op_custom_empty_def]
end
lemma gen_mop_list_swap: "mop_list_swap l i j = do {
xi ← mop_list_get l i;
xj ← mop_list_get l j;
l ← mop_list_set l i xj;
l ← mop_list_set l j xi;
RETURN l
}"
unfolding mop_list_swap_def
by (auto simp: pw_eq_iff refine_pw_simps swap_def)
end
Theory IICF_Abs_Heap
section ‹Heap Implementation On Lists›
theory IICF_Abs_Heap
imports
"HOL-Library.Multiset"
"../../../Sepref"
"List-Index.List_Index"
"../../Intf/IICF_List"
"../../Intf/IICF_Prio_Bag"
begin
text ‹
We define Min-Heaps, which implement multisets of prioritized values.
The operations are:
empty heap, emptiness check, insert an element,
remove a minimum priority element.›
subsection ‹Basic Definitions›
type_synonym 'a heap = "'a list"
locale heapstruct =
fixes prio :: "'a ⇒ 'b::linorder"
begin
definition valid :: "'a heap ⇒ nat ⇒ bool"
where "valid h i ≡ i>0 ∧ i≤length h"
abbreviation α :: "'a heap ⇒ 'a multiset" where "α ≡ mset"
lemma valid_empty[simp]: "¬valid [] i" by (auto simp: valid_def)
lemma valid0[simp]: "¬valid h 0" by (auto simp: valid_def)
lemma valid_glen[simp]: "i>length h ⟹ ¬valid h i" by (auto simp: valid_def)
lemma valid_len[simp]: "h≠[] ⟹ valid h (length h)" by (auto simp: valid_def)
lemma validI: "0<i ⟹ i≤length h ⟹ valid h i"
by (auto simp: valid_def)
definition val_of :: "'a heap ⇒ nat ⇒ 'a" where "val_of l i ≡ l!(i-1)"
abbreviation prio_of :: "'a heap ⇒ nat ⇒ 'b" where
"prio_of l i ≡ prio (val_of l i)"
subsubsection ‹Navigating the tree›
definition parent :: "nat ⇒ nat" where "parent i ≡ i div 2"
definition left :: "nat ⇒ nat" where "left i ≡ 2*i"
definition right :: "nat ⇒ nat" where "right i ≡ 2*i + 1"
abbreviation "has_parent h i ≡ valid h (parent i)"
abbreviation "has_left h i ≡ valid h (left i)"
abbreviation "has_right h i ≡ valid h (right i)"
abbreviation "vparent h i == val_of h (parent i)"
abbreviation "vleft h i == val_of h (left i)"
abbreviation "vright h i == val_of h (right i)"
abbreviation "pparent h i == prio_of h (parent i)"
abbreviation "pleft h i == prio_of h (left i)"
abbreviation "pright h i == prio_of h (right i)"
lemma parent_left_id[simp]: "parent (left i) = i"
unfolding parent_def left_def
by auto
lemma parent_right_id[simp]: "parent (right i) = i"
unfolding parent_def right_def
by auto
lemma child_of_parentD:
"has_parent l i ⟹ left (parent i) = i ∨ right (parent i) = i"
unfolding parent_def left_def right_def valid_def
by auto
lemma rc_imp_lc: "⟦valid h i; has_right h i⟧ ⟹ has_left h i"
by (auto simp: valid_def left_def right_def)
lemma plr_corner_cases[simp]:
assumes "0<i"
shows
"i≠parent i"
"i≠left i"
"i≠right i"
"parent i ≠ i"
"left i ≠ i"
"right i ≠ i"
using assms
by (auto simp: parent_def left_def right_def)
lemma i_eq_parent_conv[simp]: "i=parent i ⟷ i=0"
by (auto simp: parent_def)
subsubsection ‹Heap Property›
text ‹The heap property states, that every node's priority is greater
or equal to its parent's priority ›
definition heap_invar :: "'a heap ⇒ bool"
where "heap_invar l
≡ ∀i. valid l i ⟶ has_parent l i ⟶ pparent l i ≤ prio_of l i"
definition "heap_rel1 ≡ br α heap_invar"
lemma heap_invar_empty[simp]: "heap_invar []"
by (auto simp: heap_invar_def)
function heap_induction_scheme :: "nat ⇒ unit" where
"heap_induction_scheme i = (
if i>1 then heap_induction_scheme (parent i) else ())"
by pat_completeness auto
termination
apply (relation "less_than")
apply (auto simp: parent_def)
done
lemma
heap_parent_le: "⟦heap_invar l; valid l i; has_parent l i⟧
⟹ pparent l i ≤ prio_of l i"
unfolding heap_invar_def
by auto
lemma heap_min_prop:
assumes H: "heap_invar h"
assumes V: "valid h i"
shows "prio_of h (Suc 0) ≤ prio_of h i"
proof (cases "i>1")
case False with V show ?thesis
by (auto simp: valid_def intro: Suc_lessI)
next
case True
from V have "i≤length h" "valid h (Suc 0)" by (auto simp: valid_def)
with True show ?thesis
apply (induction i rule: heap_induction_scheme.induct)
apply (rename_tac i)
apply (case_tac "parent i = Suc 0")
apply (rule order_trans[rotated])
apply (rule heap_parent_le[OF H])
apply (auto simp: valid_def) [3]
apply (rule order_trans)
apply (rprems)
apply (auto simp: parent_def) [4]
apply (rule heap_parent_le[OF H])
apply (auto simp: valid_def parent_def)
done
qed
text ‹ Obviously, the heap property can also be stated in terms of children,
i.e., each node's priority is smaller or equal to it's children's priority.›
definition "children_ge h p i ≡
(has_left h i ⟶ p ≤ pleft h i)
∧ (has_right h i ⟶ p ≤ pright h i)"
definition "heap_invar' h ≡ ∀i. valid h i ⟶ children_ge h (prio_of h i) i"
lemma heap_eq_heap':
shows "heap_invar h ⟷ heap_invar' h"
unfolding heap_invar_def
unfolding heap_invar'_def children_ge_def
apply rule
apply auto []
apply clarsimp
apply (frule child_of_parentD)
apply auto []
done
subsection ‹Basic Operations›
text ‹The basic operations are the only operations that directly
modify the underlying data structure.›
subsubsection ‹Val-Of›
abbreviation (input) "val_of_pre l i ≡ valid l i"
definition val_of_op :: "'a heap ⇒ nat ⇒ 'a nres"
where "val_of_op l i ≡ ASSERT (i>0) ⪢ mop_list_get l (i-1)"
lemma val_of_correct[refine_vcg]:
"val_of_pre l i ⟹ val_of_op l i ≤ SPEC (λr. r = val_of l i)"
unfolding val_of_op_def val_of_def valid_def
by refine_vcg auto
abbreviation (input) "prio_of_pre ≡ val_of_pre"
definition "prio_of_op l i ≡ do {v ← val_of_op l i; RETURN (prio v)}"
lemma prio_of_op_correct[refine_vcg]:
"prio_of_pre l i ⟹ prio_of_op l i ≤ SPEC (λr. r = prio_of l i)"
unfolding prio_of_op_def
apply refine_vcg by simp
subsubsection ‹Update›
abbreviation "update_pre h i v ≡ valid h i"
definition update :: "'a heap ⇒ nat ⇒ 'a ⇒ 'a heap"
where "update h i v ≡ h[i - 1 := v]"
definition update_op :: "'a heap ⇒ nat ⇒ 'a ⇒ 'a heap nres"
where "update_op h i v ≡ ASSERT (i>0) ⪢ mop_list_set h (i-1) v"
lemma update_correct[refine_vcg]:
"update_pre h i v ⟹ update_op h i v ≤ SPEC(λr. r = update h i v)"
unfolding update_op_def update_def valid_def by refine_vcg auto
lemma update_valid[simp]: "valid (update h i v) j ⟷ valid h j"
by (auto simp: update_def valid_def)
lemma val_of_update[simp]: "⟦update_pre h i v; valid h j⟧ ⟹ val_of (update h i v) j = (
if i=j then v else val_of h j)"
unfolding update_def val_of_def
by (auto simp: nth_list_update valid_def)
lemma length_update[simp]: "length (update l i v) = length l"
by (auto simp: update_def)
subsubsection ‹Exchange›
text ‹ Exchange two elements ›
definition exch :: "'a heap ⇒ nat ⇒ nat ⇒ 'a heap" where
"exch l i j ≡ swap l (i - 1) (j - 1)"
abbreviation "exch_pre l i j ≡ valid l i ∧ valid l j"
definition exch_op :: "'a list ⇒ nat ⇒ nat ⇒ 'a list nres"
where "exch_op l i j ≡ do {
ASSERT (i>0 ∧ j>0);
l ← mop_list_swap l (i - 1) (j - 1);
RETURN l
}"
lemma exch_op_alt: "exch_op l i j = do {
vi ← val_of_op l i;
vj ← val_of_op l j;
l ← update_op l i vj;
l ← update_op l j vi;
RETURN l }"
by (auto simp: exch_op_def swap_def val_of_op_def update_op_def
pw_eq_iff refine_pw_simps)
lemma exch_op_correct[refine_vcg]:
"exch_pre l i j ⟹ exch_op l i j ≤ SPEC (λr. r = exch l i j)"
unfolding exch_op_def
apply refine_vcg
apply (auto simp: exch_def valid_def)
done
lemma valid_exch[simp]: "valid (exch l i j) k = valid l k"
unfolding exch_def by (auto simp: valid_def)
lemma val_of_exch[simp]: "⟦valid l i; valid l j; valid l k⟧ ⟹
val_of (exch l i j) k = (
if k=i then val_of l j
else if k=j then val_of l i
else val_of l k
)"
unfolding exch_def val_of_def valid_def
by (auto)
lemma exch_eq[simp]: "exch h i i = h"
by (auto simp: exch_def)
lemma α_exch[simp]: "⟦valid l i; valid l j⟧
⟹ α (exch l i j) = α l"
unfolding exch_def valid_def
by (auto)
lemma length_exch[simp]: "length (exch l i j) = length l"
by (auto simp: exch_def)
subsubsection ‹Butlast›
text ‹Remove last element›
abbreviation "butlast_pre l ≡ l≠[]"
definition butlast_op :: "'a heap ⇒ 'a heap nres"
where "butlast_op l ≡ mop_list_butlast l"
lemma butlast_op_correct[refine_vcg]:
"butlast_pre l ⟹ butlast_op l ≤ SPEC (λr. r = butlast l)"
unfolding butlast_op_def by (refine_vcg; auto)
lemma valid_butlast_conv[simp]: "valid (butlast h) i ⟷ valid h i ∧ i < length h"
by (auto simp: valid_def)
lemma valid_butlast: "valid (butlast h) i ⟹ valid h i"
by (cases h rule: rev_cases) (auto simp: valid_def)
lemma val_of_butlast[simp]: "⟦valid h i; i<length h⟧
⟹ val_of (butlast h) i = val_of h i"
by (auto simp: valid_def val_of_def nth_butlast)
lemma val_of_butlast'[simp]:
"valid (butlast h) i ⟹ val_of (butlast h) i = val_of h i"
by (cases h rule: rev_cases) (auto simp: valid_def val_of_def nth_append)
lemma α_butlast[simp]: "⟦ length h ≠ 0 ⟧
⟹ α (butlast h) = α h - {# val_of h (length h)#}"
apply (cases h rule: rev_cases)
apply (auto simp: val_of_def)
done
lemma heap_invar_butlast[simp]: "heap_invar h ⟹ heap_invar (butlast h)"
apply (cases "h = []")
apply simp
apply (auto simp: heap_invar_def dest: valid_butlast)
done
subsubsection ‹Append›
definition append_op :: "'a heap ⇒ 'a ⇒ 'a heap nres"
where "append_op l v ≡ mop_list_append l v"
lemma append_op_correct[refine_vcg]:
"append_op l v ≤ SPEC (λr. r = l@[v])"
unfolding append_op_def by (refine_vcg; auto)
lemma valid_append[simp]: "valid (l@[v]) i ⟷ valid l i ∨ i = length l + 1"
by (auto simp: valid_def)
lemma val_of_append[simp]: "valid (l@[v]) i ⟹
val_of (l@[v]) i = (if valid l i then val_of l i else v)"
unfolding valid_def val_of_def by (auto simp: nth_append)
lemma α_append[simp]: "α (l@[v]) = α l + {#v#}"
by (auto simp: )
subsection ‹Auxiliary operations›
text ‹The auxiliary operations do not have a corresponding abstract operation, but
are to restore the heap property after modification.›
subsubsection ‹Swim›
text ‹This invariant expresses that the heap has a single defect,
which can be repaired by swimming up›
definition swim_invar :: "'a heap ⇒ nat ⇒ bool"
where "swim_invar h i ≡
valid h i
∧ (∀j. valid h j ∧ has_parent h j ∧ j≠i ⟶ pparent h j ≤ prio_of h j)
∧ (has_parent h i ⟶
(∀j. valid h j ∧ has_parent h j ∧ parent j = i
⟶ pparent h i ≤ prio_of h j))"
text ‹Move up an element that is too small, until it fits›
definition swim_op :: "'a heap ⇒ nat ⇒ 'a heap nres" where
"swim_op h i ≡ do {
RECT (λswim (h,i). do {
ASSERT (valid h i ∧ swim_invar h i);
if has_parent h i then do {
ppi ← prio_of_op h (parent i);
pi ← prio_of_op h i;
if (¬ppi ≤ pi) then do {
h ← exch_op h i (parent i);
swim (h, parent i)
} else
RETURN h
} else
RETURN h
}) (h,i)
}"
lemma swim_invar_valid: "swim_invar h i ⟹ valid h i"
unfolding swim_invar_def by simp
lemma swim_invar_exit1: "¬has_parent h i ⟹ swim_invar h i ⟹ heap_invar h"
unfolding heap_invar_def swim_invar_def by auto
lemma swim_invar_exit2: "pparent h i ≤ prio_of h i ⟹ swim_invar h i ⟹ heap_invar h"
unfolding heap_invar_def swim_invar_def by auto
lemma swim_invar_pres:
assumes HPI: "has_parent h i"
assumes VIOLATED: "pparent h i > prio_of h i"
and INV: "swim_invar h i"
defines "h' ≡ exch h i (parent i)"
shows "swim_invar h' (parent i)"
unfolding swim_invar_def
apply safe
apply (simp add: h'_def HPI)
using HPI VIOLATED INV
unfolding swim_invar_def h'_def
apply auto []
using HPI VIOLATED INV
unfolding swim_invar_def h'_def
apply auto
by (metis order_trans)
lemma swim_invar_decr:
assumes INV: "heap_invar h"
assumes V: "valid h i"
assumes DECR: "prio v ≤ prio_of h i"
shows "swim_invar (update h i v) i"
using INV V DECR
apply (auto simp: swim_invar_def heap_invar_def intro: dual_order.trans)
done
lemma swim_op_correct[refine_vcg]:
"⟦swim_invar h i⟧ ⟹
swim_op h i ≤ SPEC (λh'. α h' = α h ∧ heap_invar h' ∧ length h' = length h)"
unfolding swim_op_def
using [[goals_limit = 1]]
apply (refine_vcg RECT_rule[where
pre="λ(hh,i).
swim_invar hh i
∧ α hh = α h
∧ length hh = length h" and
V = "inv_image less_than snd"
])
apply (auto) []
apply (auto) []
apply (auto) []
apply (auto) []
apply (auto simp: swim_invar_valid) []
apply (auto) []
apply (auto) []
apply (auto) []
apply rprems
apply (auto simp: swim_invar_pres) []
apply (auto simp: parent_def valid_def) []
apply (auto) []
apply (auto simp: swim_invar_exit2) []
apply (auto) []
apply (auto) []
apply (auto simp: swim_invar_exit1) []
apply (auto) []
done
subsubsection ‹Sink›
text ‹Move down an element that is too big, until it fits in›
definition sink_op :: "'a heap ⇒ nat ⇒ 'a heap nres" where
"sink_op h i ≡ do {
RECT (λsink (h,i). do {
ASSERT (valid h i);
if has_right h i then do {
ASSERT (has_left h i);
lp ← prio_of_op h (left i);
rp ← prio_of_op h (right i);
p ← prio_of_op h i;
if (lp < p ∧ rp ≥ lp) then do {
h ← exch_op h i (left i);
sink (h,left i)
} else if (rp<lp ∧ rp < p) then do {
h ← exch_op h i (right i);
sink (h,right i)
} else
RETURN h
} else if (has_left h i) then do {
lp ← prio_of_op h (left i);
p ← prio_of_op h i;
if (lp < p) then do {
h ← exch_op h i (left i);
sink (h,left i)
} else
RETURN h
} else
RETURN h
}) (h,i)
}"
text ‹This invariant expresses that the heap has a single defect,
which can be repaired by sinking›
definition "sink_invar l i ≡
valid l i
∧ (∀j. valid l j ∧ j≠i ⟶ children_ge l (prio_of l j) j)
∧ (has_parent l i ⟶ children_ge l (pparent l i) i)"
lemma sink_invar_valid: "sink_invar l i ⟹ valid l i"
unfolding sink_invar_def by auto
lemma sink_invar_exit: "⟦sink_invar l i; children_ge l (prio_of l i) i⟧
⟹ heap_invar' l"
unfolding heap_invar'_def sink_invar_def
by auto
lemma sink_aux1: "¬ (2*i ≤ length h) ⟹ ¬has_left h i ∧ ¬has_right h i"
unfolding valid_def left_def right_def by auto
lemma sink_invar_pres1:
assumes "sink_invar h i"
assumes "has_left h i" "has_right h i"
assumes "prio_of h i ≥ pleft h i"
assumes "pleft h i ≥ pright h i"
shows "sink_invar (exch h i (right i)) (right i)"
using assms
unfolding sink_invar_def
apply auto
apply (auto simp: children_ge_def)
done
lemma sink_invar_pres2:
assumes "sink_invar h i"
assumes "has_left h i" "has_right h i"
assumes "prio_of h i ≥ pleft h i"
assumes "pleft h i ≤ pright h i"
shows "sink_invar (exch h i (left i)) (left i)"
using assms
unfolding sink_invar_def
apply auto
apply (auto simp: children_ge_def)
done
lemma sink_invar_pres3:
assumes "sink_invar h i"
assumes "has_left h i" "has_right h i"
assumes "prio_of h i ≥ pright h i"
assumes "pleft h i ≤ pright h i"
shows "sink_invar (exch h i (left i)) (left i)"
using assms
unfolding sink_invar_def
apply auto
apply (auto simp: children_ge_def)
done
lemma sink_invar_pres4:
assumes "sink_invar h i"
assumes "has_left h i" "has_right h i"
assumes "prio_of h i ≥ pright h i"
assumes "pleft h i ≥ pright h i"
shows "sink_invar (exch h i (right i)) (right i)"
using assms
unfolding sink_invar_def
apply auto
apply (auto simp: children_ge_def)
done
lemma sink_invar_pres5:
assumes "sink_invar h i"
assumes "has_left h i" "¬has_right h i"
assumes "prio_of h i ≥ pleft h i"
shows "sink_invar (exch h i (left i)) (left i)"
using assms
unfolding sink_invar_def
apply auto
apply (auto simp: children_ge_def)
done
lemmas sink_invar_pres =
sink_invar_pres1
sink_invar_pres2
sink_invar_pres3
sink_invar_pres4
sink_invar_pres5
lemma sink_invar_incr:
assumes INV: "heap_invar h"
assumes V: "valid h i"
assumes INCR: "prio v ≥ prio_of h i"
shows "sink_invar (update h i v) i"
using INV V INCR
apply (auto simp: sink_invar_def)
apply (auto simp: children_ge_def heap_invar_def) []
apply (auto simp: children_ge_def heap_invar_def intro: order_trans) []
apply (frule spec[where x="left i"])
apply auto []
apply (frule spec[where x="right i"])
apply auto []
done
lemma sink_op_correct[refine_vcg]:
"⟦sink_invar h i⟧ ⟹
sink_op h i ≤ SPEC (λh'. α h' = α h ∧ heap_invar h' ∧ length h' = length h)"
unfolding sink_op_def heap_eq_heap'
using [[goals_limit = 1]]
apply (refine_vcg RECT_rule[where
pre="λ(hh,i). sink_invar hh i ∧ α hh = α h ∧ length hh = length h" and
V = "measure (λ(l,i). length l - i)"
])
apply (auto) []
apply (auto) []
apply (auto) []
apply (auto) []
apply (auto simp: sink_invar_valid) []
apply (auto simp: valid_def left_def right_def) []
apply rprems
apply (auto intro: sink_invar_pres) []
apply (auto simp: valid_def left_def right_def) []
apply rprems
apply (auto intro: sink_invar_pres) []
apply (auto simp: valid_def left_def right_def) []
apply (auto) []
apply clarsimp
apply (rule sink_invar_exit, assumption) []
apply (auto simp: children_ge_def) []
apply (auto) []
apply rprems
apply (auto intro: sink_invar_pres) []
apply (auto simp: valid_def left_def right_def) []
apply (auto) []
apply clarsimp
apply (rule sink_invar_exit, assumption) []
apply (auto simp: children_ge_def) []
apply (auto) []
apply (auto) []
apply clarsimp
apply (rule sink_invar_exit, assumption) []
apply (auto simp: children_ge_def) []
apply (auto) []
done
lemma sink_op_swim_rule:
"swim_invar h i ⟹ sink_op h i ≤ SPEC (λh'. h'=h)"
apply (frule swim_invar_valid)
unfolding sink_op_def
apply (subst RECT_unfold, refine_mono)
apply (fold sink_op_def)
apply refine_vcg
apply (simp_all)
apply (auto simp add: valid_def left_def right_def dest: swim_invar_valid) []
apply (auto simp: swim_invar_def) []
apply (auto simp: swim_invar_def) []
apply (auto simp: swim_invar_def) []
apply (auto simp: swim_invar_def) []
apply (auto simp: swim_invar_def) []
apply (auto simp: swim_invar_def) []
done
definition sink_op_opt
where
"sink_op_opt h k ≡ RECT (λD (h,k). do {
ASSERT (k>0 ∧ k≤length h);
let len = length h;
if (2*k ≤ len) then do {
let j = 2*k;
pj ← prio_of_op h j;
j ← (
if j<len then do {
psj ← prio_of_op h (Suc j);
if pj>psj then RETURN (j+1) else RETURN j
} else RETURN j);
pj ← prio_of_op h j;
pk ← prio_of_op h k;
if (pk > pj) then do {
h ← exch_op h k j;
D (h,j)
} else
RETURN h
} else RETURN h
}) (h,k)"
lemma sink_op_opt_eq: "sink_op_opt h k = sink_op h k"
unfolding sink_op_opt_def sink_op_def
apply (fo_rule arg_cong fun_cong)+
apply (intro ext)
unfolding sink_op_def[symmetric]
apply (simp cong: if_cong split del: if_split add: Let_def)
apply (auto simp: valid_def left_def right_def prio_of_op_def val_of_op_def
val_of_def less_imp_diff_less ASSERT_same_eq_conv nz_le_conv_less) []
done
subsubsection ‹Repair›
text ‹Repair a local defect in the heap. This can be done
by swimming and sinking. Note that, depending on the defect, only one
of the operations will change the heap.
Moreover, note that we do not need repair to implement the heap operations.
However, it is required for heapmaps. ›
definition "repair_op h i ≡ do {
h ← sink_op h i;
h ← swim_op h i;
RETURN h
}"
lemma update_sink_swim_cases:
assumes "heap_invar h"
assumes "valid h i"
obtains "swim_invar (update h i v) i" | "sink_invar (update h i v) i"
apply (cases rule: linear[of "prio v" "prio_of h i", THEN disjE])
apply (blast dest: swim_invar_decr[OF assms])
apply (blast dest: sink_invar_incr[OF assms])
done
lemma heap_invar_imp_swim_invar: "⟦heap_invar h; valid h i⟧ ⟹ swim_invar h i"
unfolding heap_invar_def swim_invar_def
by (auto intro: order_trans)
lemma repair_correct[refine_vcg]:
assumes "heap_invar h" and "valid h i"
shows "repair_op (update h i v) i ≤ SPEC (λh'.
heap_invar h' ∧ α h' = α (update h i v) ∧ length h' = length h)"
apply (rule update_sink_swim_cases[of h i v, OF assms])
unfolding repair_op_def
apply (refine_vcg sink_op_swim_rule)
apply auto [4]
apply (refine_vcg)
using assms(2)
apply (auto intro: heap_invar_imp_swim_invar simp: valid_def) []
apply auto [3]
done
subsection ‹Operations›
subsubsection ‹Empty›
abbreviation (input) empty :: "'a heap"
where "empty ≡ []"
definition empty_op :: "'a heap nres"
where "empty_op ≡ mop_list_empty"
lemma empty_op_correct[refine_vcg]:
"empty_op ≤ SPEC (λr. α r = {#} ∧ heap_invar r)"
unfolding empty_op_def apply refine_vcg by auto
subsubsection ‹Emptiness check›
definition is_empty_op :: "'a heap ⇒ bool nres"
where "is_empty_op h ≡ do {ASSERT (heap_invar h); let l=length h; RETURN (l=0)}"
lemma is_empty_op_correct[refine_vcg]:
"heap_invar h ⟹ is_empty_op h ≤ SPEC (λr. r⟷α h = {#})"
unfolding is_empty_op_def
apply refine_vcg by auto
subsubsection ‹Insert›
definition insert_op :: "'a ⇒ 'a heap ⇒ 'a heap nres"
where "insert_op v h ≡ do {
ASSERT (heap_invar h);
h ← append_op h v;
let l = length h;
h ← swim_op h l;
RETURN h
}"
lemma swim_invar_insert: "heap_invar l ⟹ swim_invar (l@[x]) (Suc (length l))"
unfolding swim_invar_def heap_invar_def valid_def parent_def val_of_def
by (fastforce simp: nth_append)
lemma
"(insert_op,RETURN oo op_mset_insert) ∈ Id → heap_rel1 → ⟨heap_rel1⟩nres_rel"
unfolding insert_op_def[abs_def] heap_rel1_def o_def
by refine_vcg (auto simp: swim_invar_insert in_br_conv)
lemma insert_op_correct:
"heap_invar h ⟹ insert_op v h ≤ SPEC (λh'. heap_invar h' ∧ α h' = α h + {#v#})"
unfolding insert_op_def
by (refine_vcg) (auto simp: swim_invar_insert)
lemmas [refine_vcg] = insert_op_correct
subsubsection ‹Pop minimum element›
definition pop_min_op :: "'a heap ⇒ ('a × 'a heap) nres" where
"pop_min_op h ≡ do {
ASSERT (heap_invar h);
ASSERT (valid h 1);
m ← val_of_op h 1;
let l = length h;
h ← exch_op h 1 l;
h ← butlast_op h;
if (l≠1) then do {
h ← sink_op h 1;
RETURN (m,h)
} else RETURN (m,h)
}"
lemma left_not_one[simp]: "left j ≠ Suc 0"
by (auto simp: left_def)
lemma right_one_conv[simp]: "right j = Suc 0 ⟷ j=0"
by (auto simp: right_def)
lemma parent_one_conv[simp]: "parent (Suc 0) = 0"
by (auto simp: parent_def)
lemma sink_invar_init:
assumes I: "heap_invar h"
assumes NE: "length h > 1"
shows "sink_invar (butlast (exch h (Suc 0) (length h))) (Suc 0)"
proof -
from NE have V: "valid h (Suc 0)" "valid h (length h)"
apply -
apply (auto simp: valid_def neq_Nil_conv) []
by (cases h) (auto simp: valid_def)
show ?thesis using I
unfolding heap_eq_heap' heap_invar'_def sink_invar_def
apply (intro impI conjI allI)
using NE apply (auto simp: V valid_butlast_conv) []
apply (auto simp add: children_ge_def V NE valid_butlast_conv) []
apply (auto simp add: children_ge_def V NE valid_butlast_conv) []
done
qed
lemma in_set_conv_val: "v ∈ set h ⟷ (∃i. valid h i ∧ v = val_of h i)"
apply (rule iffI)
apply (clarsimp simp add: valid_def val_of_def in_set_conv_nth)
apply (rule_tac x="Suc i" in exI; auto)
apply (clarsimp simp add: valid_def val_of_def in_set_conv_nth)
apply (rule_tac x="i - Suc 0" in exI; auto)
done
lemma pop_min_op_correct:
assumes "heap_invar h" "α h ≠ {#}"
shows "pop_min_op h ≤ SPEC (λ(v,h'). heap_invar h' ∧
v ∈# α h ∧ α h' = α h - {#v#} ∧ (∀v'∈set_mset (α h). prio v ≤ prio v'))"
proof -
note [simp del] = length_greater_0_conv
note LG = length_greater_0_conv[symmetric]
from assms show ?thesis
unfolding pop_min_op_def
apply refine_vcg
apply (simp_all add: sink_invar_init LG)
apply (auto simp: valid_def) []
apply (cases h; auto simp: val_of_def) []
apply (auto simp: in_set_conv_val simp: heap_min_prop) []
apply auto []
apply (cases h; auto simp: val_of_def) []
apply auto []
apply (cases h; auto simp: val_of_def) []
done
qed
lemmas [refine_vcg] = pop_min_op_correct
subsubsection ‹Peek minimum element›
definition peek_min_op :: "'a heap ⇒ 'a nres" where
"peek_min_op h ≡ do {
ASSERT (heap_invar h);
ASSERT (valid h 1);
val_of_op h 1
}"
lemma peek_min_op_correct:
assumes "heap_invar h" "α h ≠ {#}"
shows "peek_min_op h ≤ SPEC (λv.
v ∈# α h ∧ (∀v'∈set_mset (α h). prio v ≤ prio v'))"
unfolding peek_min_op_def
apply refine_vcg
using assms
apply clarsimp_all
apply (auto simp: valid_def) []
apply (cases h; auto simp: val_of_def) []
apply (auto simp: in_set_conv_val simp: heap_min_prop) []
done
lemmas peek_min_op_correct'[refine_vcg] = peek_min_op_correct
subsection ‹Operations as Relator-Style Refinement›
lemma empty_op_refine: "(empty_op,RETURN op_mset_empty)∈⟨heap_rel1⟩nres_rel"
apply (rule nres_relI)
apply (rule order_trans)
apply (rule empty_op_correct)
apply (auto simp: heap_rel1_def br_def pw_le_iff refine_pw_simps)
done
lemma is_empty_op_refine: "(is_empty_op,RETURN o op_mset_is_empty) ∈ heap_rel1 → ⟨bool_rel⟩nres_rel"
apply (intro nres_relI fun_relI; simp)
apply refine_vcg
apply (auto simp: heap_rel1_def br_def)
done
lemma insert_op_refine: "(insert_op,RETURN oo op_mset_insert) ∈ Id → heap_rel1 → ⟨heap_rel1⟩nres_rel"
apply (intro nres_relI fun_relI; simp)
apply (refine_vcg RETURN_as_SPEC_refine)
apply (auto simp: heap_rel1_def br_def pw_le_iff refine_pw_simps)
done
lemma pop_min_op_refine:
"(pop_min_op, PR_CONST (mop_prio_pop_min prio)) ∈ heap_rel1 → ⟨Id ×⇩r heap_rel1⟩nres_rel"
apply (intro fun_relI nres_relI)
unfolding mop_prio_pop_min_def PR_CONST_def
apply (refine_vcg SPEC_refine)
apply (auto simp: heap_rel1_def br_def)
done
lemma peek_min_op_refine:
"(peek_min_op, PR_CONST (mop_prio_peek_min prio)) ∈ heap_rel1 → ⟨Id⟩nres_rel"
apply (intro fun_relI nres_relI)
unfolding mop_prio_peek_min_def PR_CONST_def
apply (refine_vcg RES_refine)
apply (auto simp: heap_rel1_def br_def)
done
end
end
Theory IICF_HOL_List
theory IICF_HOL_List
imports "../Intf/IICF_List"
begin
context
begin
private lemma id_take_nth_drop_rl:
assumes "i<length l"
assumes "⋀l1 x l2. ⟦l=l1@x#l2; i = length l1 ⟧ ⟹ P (l1@x#l2)"
shows "P l"
apply (subst id_take_nth_drop[OF assms(1)])
apply (rule assms(2))
apply (subst id_take_nth_drop[OF assms(1)])
apply simp
apply (simp add: assms(1))
done
private lemma list_set_entails_aux:
shows "list_assn A l li * A x xi ⟹⇩A list_assn A (l[i := x]) (li[i := xi]) * true"
apply (rule entails_preI)
apply (clarsimp)
apply (cases "i < length l"; cases "i < length li"; (sep_auto dest!: list_assn_aux_eqlen_simp;fail)?)
apply (erule id_take_nth_drop_rl)
apply (erule id_take_nth_drop_rl)
apply (sep_auto simp add: list_update_append)
done
private lemma list_set_hd_tl_aux:
"a ≠ [] ⟹ list_assn R a c ⟹⇩A R (hd a) (hd c) * true"
"a ≠ [] ⟹ list_assn R a c ⟹⇩A list_assn R (tl a) (tl c) * true"
by (cases c; cases a; sep_auto; fail)+
private lemma list_set_last_butlast_aux:
"a ≠ [] ⟹ list_assn R a c ⟹⇩A R (last a) (last c) * true"
"a ≠ [] ⟹ list_assn R a c ⟹⇩A list_assn R (butlast a) (butlast c) * true"
by (cases c rule: rev_cases; cases a rule: rev_cases; sep_auto; fail)+
private lemma swap_decomp_simp[simp]:
"swap (l1 @ x # c21' @ xa # l2a) (length l1) (Suc (length l1 + length c21')) = l1@xa#c21'@x#l2a"
"swap (l1 @ x # c21' @ xa # l2a) (Suc (length l1 + length c21')) (length l1) = l1@xa#c21'@x#l2a"
by (auto simp: swap_def list_update_append nth_append)
private lemma list_swap_aux: "⟦i < length l; j < length l⟧ ⟹ list_assn A l li ⟹⇩A list_assn A (swap l i j) (swap li i j) * true"
apply (subst list_assn_aux_len; clarsimp)
apply (cases "i=j"; (sep_auto;fail)?)
apply (rule id_take_nth_drop_rl[where l=l and i=i]; simp)
apply (rule id_take_nth_drop_rl[where l=l and i=j]; simp)
apply (erule list_match_lel_lel; simp)
apply (split_list_according li l; sep_auto)
apply (split_list_according li l; sep_auto)
done
private lemma list_rotate1_aux: "list_assn A a c ⟹⇩A list_assn A (rotate1 a) (rotate1 c) * true"
by (cases a; cases c; sep_auto)
private lemma list_rev_aux: "list_assn A a c ⟹⇩A list_assn A (rev a) (rev c) * true"
apply (subst list_assn_aux_len; clarsimp)
apply (induction rule: list_induct2)
apply sep_auto
apply sep_auto
apply (erule ent_frame_fwd, frame_inference)
apply sep_auto
done
lemma mod_starE:
assumes "h ⊨ A*B"
obtains h1 h2 where "h1⊨A" "h2⊨B"
using assms by (auto simp: mod_star_conv)
private lemma CONSTRAINT_is_pureE:
assumes "CONSTRAINT is_pure A"
obtains R where "A=pure R"
using assms by (auto simp: is_pure_conv)
private method solve_dbg =
( (elim CONSTRAINT_is_pureE; (simp only: list_assn_pure_conv the_pure_pure)?)?;
sep_auto
simp: pure_def hn_ctxt_def invalid_assn_def list_assn_aux_eqlen_simp
intro!: hn_refineI[THEN hn_refine_preI] hfrefI
elim!: mod_starE
intro: list_set_entails_aux list_set_hd_tl_aux list_set_last_butlast_aux
list_swap_aux list_rotate1_aux list_rev_aux
;
((rule entails_preI; sep_auto simp: list_assn_aux_eqlen_simp | (parametricity; simp; fail))?)
)
private method solve = solve_dbg; fail
lemma HOL_list_empty_hnr_aux: "(uncurry0 (return op_list_empty), uncurry0 (RETURN op_list_empty)) ∈ unit_assn⇧k →⇩a (list_assn A)" by solve
lemma HOL_list_is_empty_hnr[sepref_fr_rules]: "(return ∘ op_list_is_empty, RETURN ∘ op_list_is_empty) ∈ (list_assn A)⇧k →⇩a bool_assn" by solve
lemma HOL_list_prepend_hnr[sepref_fr_rules]: "(uncurry (return ∘∘ op_list_prepend), uncurry (RETURN ∘∘ op_list_prepend)) ∈ A⇧d *⇩a (list_assn A)⇧d →⇩a list_assn A" by solve
lemma HOL_list_append_hnr[sepref_fr_rules]: "(uncurry (return ∘∘ op_list_append), uncurry (RETURN ∘∘ op_list_append)) ∈ (list_assn A)⇧d *⇩a A⇧d →⇩a list_assn A" by solve
lemma HOL_list_concat_hnr[sepref_fr_rules]: "(uncurry (return ∘∘ op_list_concat), uncurry (RETURN ∘∘ op_list_concat)) ∈ (list_assn A)⇧d *⇩a (list_assn A)⇧d →⇩a list_assn A" by solve
lemma HOL_list_length_hnr[sepref_fr_rules]: "(return ∘ op_list_length, RETURN ∘ op_list_length) ∈ (list_assn A)⇧k →⇩a nat_assn" by solve
lemma HOL_list_set_hnr[sepref_fr_rules]: "(uncurry2 (return ∘∘∘ op_list_set), uncurry2 (RETURN ∘∘∘ op_list_set)) ∈ (list_assn A)⇧d *⇩a nat_assn⇧k *⇩a A⇧d →⇩a list_assn A" by solve
lemma HOL_list_hd_hnr[sepref_fr_rules]: "(return ∘ op_list_hd, RETURN ∘ op_list_hd) ∈ [λy. y ≠ []]⇩a (list_assn R)⇧d → R" by solve
lemma HOL_list_tl_hnr[sepref_fr_rules]: "(return ∘ op_list_tl, RETURN ∘ op_list_tl) ∈ [λy. y ≠ []]⇩a (list_assn A)⇧d → list_assn A" by solve
lemma HOL_list_last_hnr[sepref_fr_rules]: "(return ∘ op_list_last, RETURN ∘ op_list_last) ∈ [λy. y ≠ []]⇩a (list_assn R)⇧d → R" by solve
lemma HOL_list_butlast_hnr[sepref_fr_rules]: "(return ∘ op_list_butlast, RETURN ∘ op_list_butlast) ∈ [λy. y ≠ []]⇩a (list_assn A)⇧d → list_assn A" by solve
lemma HOL_list_swap_hnr[sepref_fr_rules]: "(uncurry2 (return ∘∘∘ op_list_swap), uncurry2 (RETURN ∘∘∘ op_list_swap))
∈ [λ((a, b), ba). b < length a ∧ ba < length a]⇩a (list_assn A)⇧d *⇩a nat_assn⇧k *⇩a nat_assn⇧k → list_assn A" by solve
lemma HOL_list_rotate1_hnr[sepref_fr_rules]: "(return ∘ op_list_rotate1, RETURN ∘ op_list_rotate1) ∈ (list_assn A)⇧d →⇩a list_assn A" by solve
lemma HOL_list_rev_hnr[sepref_fr_rules]: "(return ∘ op_list_rev, RETURN ∘ op_list_rev) ∈ (list_assn A)⇧d →⇩a list_assn A" by solve
lemma HOL_list_replicate_hnr[sepref_fr_rules]: "CONSTRAINT is_pure A ⟹ (uncurry (return ∘∘ op_list_replicate), uncurry (RETURN ∘∘ op_list_replicate)) ∈ nat_assn⇧k *⇩a A⇧k →⇩a list_assn A" by solve
lemma HOL_list_get_hnr[sepref_fr_rules]: "CONSTRAINT is_pure A ⟹ (uncurry (return ∘∘ op_list_get), uncurry (RETURN ∘∘ op_list_get)) ∈ [λ(a, b). b < length a]⇩a (list_assn A)⇧k *⇩a nat_assn⇧k → A" by solve
private lemma bool_by_paramE: "⟦ a; (a,b)∈Id ⟧ ⟹ b" by simp
private lemma bool_by_paramE': "⟦ a; (b,a)∈Id ⟧ ⟹ b" by simp
lemma HOL_list_contains_hnr[sepref_fr_rules]: "⟦CONSTRAINT is_pure A; single_valued (the_pure A); single_valued ((the_pure A)¯)⟧
⟹ (uncurry (return ∘∘ op_list_contains), uncurry (RETURN ∘∘ op_list_contains)) ∈ A⇧k *⇩a (list_assn A)⇧k →⇩a bool_assn"
apply solve_dbg
apply (erule bool_by_paramE[where a="_∈set _"]) apply parametricity
apply (erule bool_by_paramE'[where a="_∈set _"]) apply parametricity
done
lemmas HOL_list_empty_hnr_mop = HOL_list_empty_hnr_aux[FCOMP mk_mop_rl0_np[OF mop_list_empty_alt]]
lemmas HOL_list_is_empty_hnr_mop[sepref_fr_rules] = HOL_list_is_empty_hnr[FCOMP mk_mop_rl1_np[OF mop_list_is_empty_alt]]
lemmas HOL_list_prepend_hnr_mop[sepref_fr_rules] = HOL_list_prepend_hnr[FCOMP mk_mop_rl2_np[OF mop_list_prepend_alt]]
lemmas HOL_list_append_hnr_mop[sepref_fr_rules] = HOL_list_append_hnr[FCOMP mk_mop_rl2_np[OF mop_list_append_alt]]
lemmas HOL_list_concat_hnr_mop[sepref_fr_rules] = HOL_list_concat_hnr[FCOMP mk_mop_rl2_np[OF mop_list_concat_alt]]
lemmas HOL_list_length_hnr_mop[sepref_fr_rules] = HOL_list_length_hnr[FCOMP mk_mop_rl1_np[OF mop_list_length_alt]]
lemmas HOL_list_set_hnr_mop[sepref_fr_rules] = HOL_list_set_hnr[FCOMP mk_mop_rl3[OF mop_list_set_alt]]
lemmas HOL_list_hd_hnr_mop[sepref_fr_rules] = HOL_list_hd_hnr[FCOMP mk_mop_rl1[OF mop_list_hd_alt]]
lemmas HOL_list_tl_hnr_mop[sepref_fr_rules] = HOL_list_tl_hnr[FCOMP mk_mop_rl1[OF mop_list_tl_alt]]
lemmas HOL_list_last_hnr_mop[sepref_fr_rules] = HOL_list_last_hnr[FCOMP mk_mop_rl1[OF mop_list_last_alt]]
lemmas HOL_list_butlast_hnr_mop[sepref_fr_rules] = HOL_list_butlast_hnr[FCOMP mk_mop_rl1[OF mop_list_butlast_alt]]
lemmas HOL_list_swap_hnr_mop[sepref_fr_rules] = HOL_list_swap_hnr[FCOMP mk_mop_rl3[OF mop_list_swap_alt]]
lemmas HOL_list_rotate1_hnr_mop[sepref_fr_rules] = HOL_list_rotate1_hnr[FCOMP mk_mop_rl1_np[OF mop_list_rotate1_alt]]
lemmas HOL_list_rev_hnr_mop[sepref_fr_rules] = HOL_list_rev_hnr[FCOMP mk_mop_rl1_np[OF mop_list_rev_alt]]
lemmas HOL_list_replicate_hnr_mop[sepref_fr_rules] = HOL_list_replicate_hnr[FCOMP mk_mop_rl2_np[OF mop_list_replicate_alt]]
lemmas HOL_list_get_hnr_mop[sepref_fr_rules] = HOL_list_get_hnr[FCOMP mk_mop_rl2[OF mop_list_get_alt]]
lemmas HOL_list_contains_hnr_mop[sepref_fr_rules] = HOL_list_contains_hnr[FCOMP mk_mop_rl2_np[OF mop_list_contains_alt]]
lemmas HOL_list_empty_hnr = HOL_list_empty_hnr_aux HOL_list_empty_hnr_mop
end
definition [simp]: "op_HOL_list_empty ≡ op_list_empty"
interpretation HOL_list: list_custom_empty "list_assn A" "return []" op_HOL_list_empty
apply unfold_locales
apply (sep_auto intro!: hfrefI hn_refineI)
by simp
schematic_goal
notes [sepref_fr_rules] = HOL_list_empty_hnr
shows
"hn_refine (emp) (?c::?'c Heap) ?Γ' ?R (do {
x ← RETURN [1,2,3::nat];
let x2 = op_list_append x 5;
ASSERT (length x = 4);
let x = op_list_swap x 1 2;
x ← mop_list_swap x 1 2;
RETURN (x@x)
})"
by sepref
end
Theory IICF_Array_List
theory IICF_Array_List
imports
"../Intf/IICF_List"
Separation_Logic_Imperative_HOL.Array_Blit
begin
type_synonym 'a array_list = "'a Heap.array × nat"
definition "is_array_list l ≡ λ(a,n). ∃⇩Al'. a ↦⇩a l' * ↑(n ≤ length l' ∧ l = take n l' ∧ length l'>0)"
lemma is_array_list_prec[safe_constraint_rules]: "precise is_array_list"
unfolding is_array_list_def[abs_def]
apply(rule preciseI)
apply(simp split: prod.splits)
using preciseD snga_prec by fastforce
definition "initial_capacity ≡ 16::nat"
definition "minimum_capacity ≡ 16::nat"
definition "arl_empty ≡ do {
a ← Array.new initial_capacity default;
return (a,0)
}"
definition "arl_empty_sz init_cap ≡ do {
a ← Array.new (max init_cap minimum_capacity) default;
return (a,0)
}"
definition "arl_append ≡ λ(a,n) x. do {
len ← Array.len a;
if n<len then do {
a ← Array.upd n x a;
return (a,n+1)
} else do {
let newcap = 2 * len;
a ← array_grow a newcap default;
a ← Array.upd n x a;
return (a,n+1)
}
}"
definition "arl_copy ≡ λ(a,n). do {
a ← array_copy a;
return (a,n)
}"
definition arl_length :: "'a::heap array_list ⇒ nat Heap" where
"arl_length ≡ λ(a,n). return (n)"
definition arl_is_empty :: "'a::heap array_list ⇒ bool Heap" where
"arl_is_empty ≡ λ(a,n). return (n=0)"
definition arl_last :: "'a::heap array_list ⇒ 'a Heap" where
"arl_last ≡ λ(a,n). do {
Array.nth a (n - 1)
}"
definition arl_butlast :: "'a::heap array_list ⇒ 'a array_list Heap" where
"arl_butlast ≡ λ(a,n). do {
let n = n - 1;
len ← Array.len a;
if (n*4 < len ∧ n*2≥minimum_capacity) then do {
a ← array_shrink a (n*2);
return (a,n)
} else
return (a,n)
}"
definition arl_get :: "'a::heap array_list ⇒ nat ⇒ 'a Heap" where
"arl_get ≡ λ(a,n) i. Array.nth a i"
definition arl_set :: "'a::heap array_list ⇒ nat ⇒ 'a ⇒ 'a array_list Heap" where
"arl_set ≡ λ(a,n) i x. do { a ← Array.upd i x a; return (a,n)}"
lemma arl_empty_rule[sep_heap_rules]: "< emp > arl_empty <is_array_list []>"
by (sep_auto simp: arl_empty_def is_array_list_def initial_capacity_def)
lemma arl_empty_sz_rule[sep_heap_rules]: "< emp > arl_empty_sz N <is_array_list []>"
by (sep_auto simp: arl_empty_sz_def is_array_list_def minimum_capacity_def)
lemma arl_copy_rule[sep_heap_rules]: "< is_array_list l a > arl_copy a <λr. is_array_list l a * is_array_list l r>"
by (sep_auto simp: arl_copy_def is_array_list_def)
lemma arl_append_rule[sep_heap_rules]: "
< is_array_list l a >
arl_append a x
<λa. is_array_list (l@[x]) a >⇩t"
by (sep_auto
simp: arl_append_def is_array_list_def take_update_last neq_Nil_conv
split: prod.splits nat.split)
lemma arl_length_rule[sep_heap_rules]: "
<is_array_list l a>
arl_length a
<λr. is_array_list l a * ↑(r=length l)>"
by (sep_auto simp: arl_length_def is_array_list_def)
lemma arl_is_empty_rule[sep_heap_rules]: "
<is_array_list l a>
arl_is_empty a
<λr. is_array_list l a * ↑(r⟷(l=[]))>"
by (sep_auto simp: arl_is_empty_def is_array_list_def)
lemma arl_last_rule[sep_heap_rules]: "
l≠[] ⟹
<is_array_list l a>
arl_last a
<λr. is_array_list l a * ↑(r=last l)>"
by (sep_auto simp: arl_last_def is_array_list_def last_take_nth_conv)
lemma arl_butlast_rule[sep_heap_rules]: "
l≠[] ⟹
<is_array_list l a>
arl_butlast a
<is_array_list (butlast l)>⇩t"
proof -
assume [simp]: "l≠[]"
have [simp]: "⋀x. min (x-Suc 0) ((x-Suc 0)*2) = x-Suc 0" by auto
show ?thesis
by (sep_auto
split: prod.splits
simp: arl_butlast_def is_array_list_def butlast_take minimum_capacity_def)
qed
lemma arl_get_rule[sep_heap_rules]: "
i<length l ⟹
<is_array_list l a>
arl_get a i
<λr. is_array_list l a * ↑(r=l!i)>"
by (sep_auto simp: arl_get_def is_array_list_def split: prod.split)
lemma arl_set_rule[sep_heap_rules]: "
i<length l ⟹
<is_array_list l a>
arl_set a i x
<is_array_list (l[i:=x])>"
by (sep_auto simp: arl_set_def is_array_list_def split: prod.split)
definition "arl_assn A ≡ hr_comp is_array_list (⟨the_pure A⟩list_rel)"
lemmas [safe_constraint_rules] = CN_FALSEI[of is_pure "arl_assn A" for A]
lemma arl_assn_comp: "is_pure A ⟹ hr_comp (arl_assn A) (⟨B⟩list_rel) = arl_assn (hr_comp A B)"
unfolding arl_assn_def
by (auto simp: hr_comp_the_pure hr_comp_assoc list_rel_compp)
lemma arl_assn_comp': "hr_comp (arl_assn id_assn) (⟨B⟩list_rel) = arl_assn (pure B)"
by (simp add: arl_assn_comp)
context
notes [fcomp_norm_unfold] = arl_assn_def[symmetric] arl_assn_comp'
notes [intro!] = hfrefI hn_refineI[THEN hn_refine_preI]
notes [simp] = pure_def hn_ctxt_def invalid_assn_def
begin
lemma arl_empty_hnr_aux: "(uncurry0 arl_empty,uncurry0 (RETURN op_list_empty)) ∈ unit_assn⇧k →⇩a is_array_list"
by sep_auto
sepref_decl_impl (no_register) arl_empty: arl_empty_hnr_aux .
lemma arl_empty_sz_hnr_aux: "(uncurry0 (arl_empty_sz N),uncurry0 (RETURN op_list_empty)) ∈ unit_assn⇧k →⇩a is_array_list"
by sep_auto
sepref_decl_impl (no_register) arl_empty_sz: arl_empty_sz_hnr_aux .
definition "op_arl_empty ≡ op_list_empty"
definition "op_arl_empty_sz (N::nat) ≡ op_list_empty"
lemma arl_copy_hnr_aux: "(arl_copy,RETURN o op_list_copy) ∈ is_array_list⇧k →⇩a is_array_list"
by sep_auto
sepref_decl_impl arl_copy: arl_copy_hnr_aux .
lemma arl_append_hnr_aux: "(uncurry arl_append,uncurry (RETURN oo op_list_append)) ∈ (is_array_list⇧d *⇩a id_assn⇧k) →⇩a is_array_list"
by sep_auto
sepref_decl_impl arl_append: arl_append_hnr_aux .
lemma arl_length_hnr_aux: "(arl_length,RETURN o op_list_length) ∈ is_array_list⇧k →⇩a nat_assn"
by sep_auto
sepref_decl_impl arl_length: arl_length_hnr_aux .
lemma arl_is_empty_hnr_aux: "(arl_is_empty,RETURN o op_list_is_empty) ∈ is_array_list⇧k →⇩a bool_assn"
by sep_auto
sepref_decl_impl arl_is_empty: arl_is_empty_hnr_aux .
lemma arl_last_hnr_aux: "(arl_last,RETURN o op_list_last) ∈ [pre_list_last]⇩a is_array_list⇧k → id_assn"
by sep_auto
sepref_decl_impl arl_last: arl_last_hnr_aux .
lemma arl_butlast_hnr_aux: "(arl_butlast,RETURN o op_list_butlast) ∈ [pre_list_butlast]⇩a is_array_list⇧d → is_array_list"
by sep_auto
sepref_decl_impl arl_butlast: arl_butlast_hnr_aux .
lemma arl_get_hnr_aux: "(uncurry arl_get,uncurry (RETURN oo op_list_get)) ∈ [λ(l,i). i<length l]⇩a (is_array_list⇧k *⇩a nat_assn⇧k) → id_assn"
by sep_auto
sepref_decl_impl arl_get: arl_get_hnr_aux .
lemma arl_set_hnr_aux: "(uncurry2 arl_set,uncurry2 (RETURN ooo op_list_set)) ∈ [λ((l,i),_). i<length l]⇩a (is_array_list⇧d *⇩a nat_assn⇧k *⇩a id_assn⇧k) → is_array_list"
by sep_auto
sepref_decl_impl arl_set: arl_set_hnr_aux .
sepref_definition arl_swap is "uncurry2 mop_list_swap" :: "((arl_assn id_assn)⇧d *⇩a nat_assn⇧k *⇩a nat_assn⇧k →⇩a arl_assn id_assn)"
unfolding gen_mop_list_swap[abs_def]
by sepref
sepref_decl_impl (ismop) arl_swap: arl_swap.refine .
end
interpretation arl: list_custom_empty "arl_assn A" arl_empty op_arl_empty
apply unfold_locales
apply (rule arl_empty_hnr)
by (auto simp: op_arl_empty_def)
lemma [def_pat_rules]: "op_arl_empty_sz$N ≡ UNPROTECT (op_arl_empty_sz N)" by simp
interpretation arl_sz: list_custom_empty "arl_assn A" "arl_empty_sz N" "PR_CONST (op_arl_empty_sz N)"
apply unfold_locales
apply (rule arl_empty_sz_hnr)
by (auto simp: op_arl_empty_sz_def)
end
Theory IICF_Impl_Heap
section ‹Implementation of Heaps with Arrays›
theory IICF_Impl_Heap
imports
IICF_Abs_Heap
"../IICF_HOL_List"
"../IICF_Array_List"
"HOL-Library.Rewrite"
begin
text ‹We implement the heap data structure by an array.
The implementation is automatically synthesized by the Sepref-tool.
›
subsection ‹Setup of the Sepref-Tool›
context
fixes prio :: "'a::{heap,default} ⇒ 'b::linorder"
begin
interpretation heapstruct prio .
definition "heap_rel A ≡ hr_comp (hr_comp (arl_assn id_assn) heap_rel1) (⟨the_pure A⟩mset_rel)"
end
locale heapstruct_impl =
fixes prio :: "'a::{heap,default} ⇒ 'b::linorder"
begin
sublocale heapstruct prio .
abbreviation "rel ≡ arl_assn id_assn"
sepref_register prio
lemma [sepref_import_param]: "(prio,prio) ∈ Id → Id" by simp
lemma [sepref_import_param]:
"((≤), (≤)::'b ⇒ _) ∈ Id → Id → bool_rel"
"((<), (<)::'b ⇒ _) ∈ Id → Id → bool_rel"
by simp_all
sepref_register
update_op
val_of_op
"PR_CONST prio_of_op"
exch_op
valid
"length::'a list ⇒ _"
append_op
butlast_op
"PR_CONST sink_op"
"PR_CONST swim_op"
"PR_CONST repair_op"
lemma [def_pat_rules]:
"heapstruct.prio_of_op$prio ≡ PR_CONST prio_of_op"
"heapstruct.sink_op$prio ≡ PR_CONST sink_op"
"heapstruct.swim_op$prio ≡ PR_CONST swim_op"
"heapstruct.repair_op$prio ≡ PR_CONST repair_op"
by simp_all
end
context
fixes prio :: "'a::{heap,default} ⇒ 'b::linorder"
begin
interpretation heapstruct_impl prio .
subsection ‹Synthesis of operations›
text ‹Note that we have to repeat some boilerplate per operation.
It is future work to add more automation here.›
sepref_definition update_impl is "uncurry2 update_op" :: "rel⇧d *⇩a nat_assn⇧k *⇩a id_assn⇧k →⇩a rel"
unfolding update_op_def[abs_def]
by sepref
lemmas [sepref_fr_rules] = update_impl.refine
sepref_definition val_of_impl is "uncurry val_of_op" :: "rel⇧k *⇩a nat_assn⇧k →⇩a id_assn"
unfolding val_of_op_def[abs_def]
by sepref
lemmas [sepref_fr_rules] = val_of_impl.refine
sepref_definition exch_impl is "uncurry2 exch_op" :: "rel⇧d *⇩a nat_assn⇧k *⇩a nat_assn⇧k →⇩a rel"
unfolding exch_op_def[abs_def]
by sepref
lemmas [sepref_fr_rules] = exch_impl.refine
sepref_definition valid_impl is "uncurry (RETURN oo valid)" :: "rel⇧k *⇩a nat_assn⇧k →⇩a bool_assn"
unfolding valid_def[abs_def]
by sepref
lemmas [sepref_fr_rules] = valid_impl.refine
sepref_definition prio_of_impl is "uncurry (PR_CONST prio_of_op)" :: "rel⇧k *⇩a nat_assn⇧k →⇩a id_assn"
unfolding prio_of_op_def[abs_def] PR_CONST_def
by sepref
lemmas [sepref_fr_rules] = prio_of_impl.refine
sepref_definition swim_impl is "uncurry (PR_CONST swim_op)" :: "rel⇧d *⇩a nat_assn⇧k →⇩a rel"
unfolding swim_op_def[abs_def] parent_def PR_CONST_def
by sepref
lemmas [sepref_fr_rules] = swim_impl.refine
sepref_definition sink_impl is "uncurry (PR_CONST sink_op)" :: "rel⇧d *⇩a nat_assn⇧k →⇩a rel"
unfolding sink_op_opt_def[abs_def] sink_op_opt_eq[symmetric,abs_def] PR_CONST_def
by sepref
lemmas [sepref_fr_rules] = sink_impl.refine
lemmas [fcomp_norm_unfold] = heap_rel_def[symmetric]
sepref_definition empty_impl is "uncurry0 empty_op" :: "unit_assn⇧k →⇩a rel"
unfolding empty_op_def arl.fold_custom_empty
by sepref
sepref_decl_impl (no_register) heap_empty: empty_impl.refine[FCOMP empty_op_refine] .
sepref_definition is_empty_impl is "is_empty_op" :: "rel⇧k →⇩a bool_assn"
unfolding is_empty_op_def[abs_def]
by sepref
sepref_decl_impl heap_is_empty: is_empty_impl.refine[FCOMP is_empty_op_refine] .
sepref_definition insert_impl is "uncurry insert_op" :: "id_assn⇧k *⇩a rel⇧d →⇩a rel"
unfolding insert_op_def[abs_def] append_op_def
by sepref
sepref_decl_impl heap_insert: insert_impl.refine[FCOMP insert_op_refine] .
sepref_definition pop_min_impl is "pop_min_op" :: "rel⇧d →⇩a prod_assn id_assn rel"
unfolding pop_min_op_def[abs_def] butlast_op_def
by sepref
sepref_decl_impl (no_mop) heap_pop_min: pop_min_impl.refine[FCOMP pop_min_op_refine] .
sepref_definition peek_min_impl is "peek_min_op" :: "rel⇧k →⇩a id_assn"
unfolding peek_min_op_def[abs_def]
by sepref
sepref_decl_impl (no_mop) heap_peek_min: peek_min_impl.refine[FCOMP peek_min_op_refine] .
end
definition [simp]: "heap_custom_empty ≡ op_mset_empty"
interpretation heap: mset_custom_empty
"heap_rel prio A" empty_impl heap_custom_empty for prio A
apply unfold_locales
apply (rule heap_empty_hnr)
by simp
subsection ‹Regression Test›
export_code empty_impl is_empty_impl insert_impl pop_min_impl peek_min_impl checking SML
definition "sort_by_prio prio l ≡ do {
q ← nfoldli l (λ_. True) (λx q. mop_mset_insert x q) heap_custom_empty;
(l,q) ← WHILET (λ(l,q). ¬op_mset_is_empty q) (λ(l,q). do {
(x,q) ← mop_prio_pop_min prio q;
RETURN (l@[x],q)
}) (op_arl_empty,q);
RETURN l
}"
context fixes prio:: "'a::{default,heap} ⇒ 'b::linorder" begin
sepref_definition sort_impl is
"sort_by_prio prio" :: "(list_assn (id_assn::'a::{default,heap} ⇒ _))⇧k →⇩a arl_assn id_assn"
unfolding sort_by_prio_def[abs_def]
by sepref
end
definition "sort_impl_nat ≡ sort_impl (id::nat⇒nat) "
export_code sort_impl checking SML
ML ‹
@{code sort_impl_nat} (map @{code nat_of_integer} [4,1,7,2,3,9,8,62]) ()
›
hide_const sort_impl sort_impl_nat
hide_fact sort_impl_def sort_impl_nat_def sort_impl.refine
end
Theory IICF_Map
section ‹Map Interface›
theory IICF_Map
imports "../../Sepref"
begin
subsection ‹Parametricity for Maps›
definition [to_relAPP]: "map_rel K V ≡ (K → ⟨V⟩option_rel)
∩ { (mi,m). dom mi ⊆ Domain K ∧ dom m ⊆ Range K }"
lemma bi_total_map_rel_eq:
"⟦IS_RIGHT_TOTAL K; IS_LEFT_TOTAL K⟧ ⟹ ⟨K,V⟩map_rel = K → ⟨V⟩option_rel"
unfolding map_rel_def IS_RIGHT_TOTAL_def IS_LEFT_TOTAL_def
by (auto dest: fun_relD)
lemma map_rel_Id[simp]: "⟨Id,Id⟩map_rel = Id"
unfolding map_rel_def by auto
lemma map_rel_empty1_simp[simp]:
"(Map.empty,m)∈⟨K,V⟩map_rel ⟷ m=Map.empty"
apply (auto simp: map_rel_def)
by (meson RangeE domIff option_rel_simp(1) subsetCE tagged_fun_relD_none)
lemma map_rel_empty2_simp[simp]:
"(m,Map.empty)∈⟨K,V⟩map_rel ⟷ m=Map.empty"
apply (auto simp: map_rel_def)
by (meson Domain.cases domIff fun_relD2 option_rel_simp(2) subset_eq)
lemma map_rel_obtain1:
assumes 1: "(m,n)∈⟨K,V⟩map_rel"
assumes 2: "n l = Some w"
obtains k v where "m k = Some v" "(k,l)∈K" "(v,w)∈V"
using 1 unfolding map_rel_def
proof clarsimp
assume R: "(m, n) ∈ K → ⟨V⟩option_rel"
assume "dom n ⊆ Range K"
with 2 obtain k where "(k,l)∈K" by auto
moreover from fun_relD[OF R this] have "(m k, n l) ∈ ⟨V⟩option_rel" .
with 2 obtain v where "m k = Some v" "(v,w)∈V" by (cases "m k"; auto)
ultimately show thesis by - (rule that)
qed
lemma map_rel_obtain2:
assumes 1: "(m,n)∈⟨K,V⟩map_rel"
assumes 2: "m k = Some v"
obtains l w where "n l = Some w" "(k,l)∈K" "(v,w)∈V"
using 1 unfolding map_rel_def
proof clarsimp
assume R: "(m, n) ∈ K → ⟨V⟩option_rel"
assume "dom m ⊆ Domain K"
with 2 obtain l where "(k,l)∈K" by auto
moreover from fun_relD[OF R this] have "(m k, n l) ∈ ⟨V⟩option_rel" .
with 2 obtain w where "n l = Some w" "(v,w)∈V" by (cases "n l"; auto)
ultimately show thesis by - (rule that)
qed
lemma param_dom[param]: "(dom,dom)∈⟨K,V⟩map_rel → ⟨K⟩set_rel"
apply (clarsimp simp: set_rel_def; safe)
apply (erule (1) map_rel_obtain2; auto)
apply (erule (1) map_rel_obtain1; auto)
done
subsection ‹Interface Type›
sepref_decl_intf ('k,'v) i_map is "'k ⇀ 'v"
lemma [synth_rules]: "⟦INTF_OF_REL K TYPE('k); INTF_OF_REL V TYPE('v)⟧
⟹ INTF_OF_REL (⟨K,V⟩map_rel) TYPE(('k,'v) i_map)" by simp
subsection ‹Operations›
sepref_decl_op map_empty: "Map.empty" :: "⟨K,V⟩map_rel" .
sepref_decl_op map_is_empty: "(=) Map.empty" :: "⟨K,V⟩map_rel → bool_rel"
apply (rule fref_ncI)
apply parametricity
apply (rule fun_relI; auto)
done
sepref_decl_op map_update: "λk v m. m(k↦v)" :: "K → V → ⟨K,V⟩map_rel → ⟨K,V⟩map_rel"
where "single_valued K" "single_valued (K¯)"
apply (rule fref_ncI)
apply parametricity
unfolding map_rel_def
apply (intro fun_relI)
apply (elim IntE; rule IntI)
apply (intro fun_relI)
apply parametricity
apply (simp add: pres_eq_iff_svb)
apply auto
done
sepref_decl_op map_delete: "λk m. fun_upd m k None" :: "K → ⟨K,V⟩map_rel → ⟨K,V⟩map_rel"
where "single_valued K" "single_valued (K¯)"
apply (rule fref_ncI)
apply parametricity
unfolding map_rel_def
apply (intro fun_relI)
apply (elim IntE; rule IntI)
apply (intro fun_relI)
apply parametricity
apply (simp add: pres_eq_iff_svb)
apply auto
done
sepref_decl_op map_lookup: "λk (m::'k⇀'v). m k" :: "K → ⟨K,V⟩map_rel → ⟨V⟩option_rel"
apply (rule fref_ncI)
apply parametricity
unfolding map_rel_def
apply (intro fun_relI)
apply (elim IntE)
apply parametricity
done
lemma in_dom_alt: "k∈dom m ⟷ ¬is_None (m k)" by (auto split: option.split)
sepref_decl_op map_contains_key: "λk m. k∈dom m" :: "K → ⟨K,V⟩map_rel → bool_rel"
unfolding in_dom_alt
apply (rule fref_ncI)
apply parametricity
unfolding map_rel_def
apply (elim IntE)
apply parametricity
done
subsection ‹Patterns›
lemma pat_map_empty[pat_rules]: "λ⇩2_. None ≡ op_map_empty" by simp
lemma pat_map_is_empty[pat_rules]:
"(=) $m$(λ⇩2_. None) ≡ op_map_is_empty$m"
"(=) $(λ⇩2_. None)$m ≡ op_map_is_empty$m"
"(=) $(dom$m)${} ≡ op_map_is_empty$m"
"(=) ${}$(dom$m) ≡ op_map_is_empty$m"
unfolding atomize_eq
by (auto dest: sym)
lemma pat_map_update[pat_rules]:
"fun_upd$m$k$(Some$v) ≡ op_map_update$'k$'v$'m"
by simp
lemma pat_map_lookup[pat_rules]: "m$k ≡ op_map_lookup$'k$'m"
by simp
lemma op_map_delete_pat[pat_rules]:
"(|`) $ m $ (uminus $ (insert $ k $ {})) ≡ op_map_delete$'k$'m"
"fun_upd$m$k$None ≡ op_map_delete$'k$'m"
by (simp_all add: map_upd_eq_restrict)
lemma op_map_contains_key[pat_rules]:
"(∈) $ k $ (dom$m) ≡ op_map_contains_key$'k$'m"
"Not$((=) $(m$k)$None) ≡ op_map_contains_key$'k$'m"
by (auto intro!: eq_reflection)
subsection ‹Parametricity›
locale map_custom_empty =
fixes op_custom_empty :: "'k⇀'v"
assumes op_custom_empty_def: "op_custom_empty = op_map_empty"
begin
sepref_register op_custom_empty :: "('kx,'vx) i_map"
lemma fold_custom_empty:
"Map.empty = op_custom_empty"
"op_map_empty = op_custom_empty"
"mop_map_empty = RETURN op_custom_empty"
unfolding op_custom_empty_def by simp_all
end
end
Theory IICF_Prio_Map
section ‹Priority Maps›
theory IICF_Prio_Map
imports IICF_Map
begin
text ‹This interface inherits from maps, and adds some operations›
lemma uncurry_fun_rel_conv:
"(uncurry f, uncurry g) ∈ A×⇩rB → R ⟷ (f,g)∈A→B→R"
by (auto simp: uncurry_def dest!: fun_relD intro: prod_relI)
lemma uncurry0_fun_rel_conv:
"(uncurry0 f, uncurry0 g) ∈ unit_rel → R ⟷ (f,g)∈R"
by (auto dest!: fun_relD)
lemma RETURN_rel_conv0: "(RETURN f, RETURN g)∈⟨A⟩nres_rel ⟷ (f,g)∈A"
by (auto simp: nres_rel_def)
lemma RETURN_rel_conv1: "(RETURN o f, RETURN o g)∈A → ⟨B⟩nres_rel ⟷ (f,g)∈A→B"
by (auto simp: nres_rel_def dest!: fun_relD)
lemma RETURN_rel_conv2: "(RETURN oo f, RETURN oo g)∈A → B → ⟨R⟩nres_rel ⟷ (f,g)∈A→B→R"
by (auto simp: nres_rel_def dest!: fun_relD)
lemma RETURN_rel_conv3: "(RETURN ooo f, RETURN ooo g)∈A→B→C → ⟨R⟩nres_rel ⟷ (f,g)∈A→B→C→R"
by (auto simp: nres_rel_def dest!: fun_relD)
lemmas fref2param_unfold =
uncurry_fun_rel_conv uncurry0_fun_rel_conv
RETURN_rel_conv0 RETURN_rel_conv1 RETURN_rel_conv2 RETURN_rel_conv3
lemmas param_op_map_update[param] = op_map_update.fref[THEN fref_ncD, unfolded fref2param_unfold]
lemmas param_op_map_delete[param] = op_map_delete.fref[THEN fref_ncD, unfolded fref2param_unfold]
lemmas param_op_map_is_empty[param] = op_map_is_empty.fref[THEN fref_ncD, unfolded fref2param_unfold]
subsection ‹Additional Operations›
sepref_decl_op map_update_new: "op_map_update" :: "[λ((k,v),m). k∉dom m]⇩f (K×⇩rV)×⇩r⟨K,V⟩map_rel → ⟨K,V⟩map_rel"
where "single_valued K" "single_valued (K¯)" .
sepref_decl_op map_update_ex: "op_map_update" :: "[λ((k,v),m). k∈dom m]⇩f (K×⇩rV)×⇩r⟨K,V⟩map_rel → ⟨K,V⟩map_rel"
where "single_valued K" "single_valued (K¯)" .
sepref_decl_op map_delete_ex: "op_map_delete" :: "[λ(k,m). k∈dom m]⇩f K×⇩r⟨K,V⟩map_rel → ⟨K,V⟩map_rel"
where "single_valued K" "single_valued (K¯)" .
context
fixes prio :: "'v ⇒ 'p::linorder"
begin
sepref_decl_op pm_decrease_key: "op_map_update"
:: "[λ((k,v),m). k∈dom m ∧ prio v ≤ prio (the (m k))]⇩f (K×⇩rV)×⇩r⟨K,V⟩map_rel → ⟨K,(V::('v×'v) set)⟩map_rel"
where "single_valued K" "single_valued (K¯)" "IS_BELOW_ID V"
proof goal_cases
case 1
have [param]: "((≤),(≤))∈Id→Id→bool_rel" by simp
from 1 show ?case
apply (parametricity add: param_and_cong1)
apply (auto simp: IS_BELOW_ID_def map_rel_def dest!: fun_relD)
done
qed
sepref_decl_op pm_increase_key: "op_map_update"
:: "[λ((k,v),m). k∈dom m ∧ prio v ≥ prio (the (m k))]⇩f (K×⇩rV)×⇩r⟨K,V⟩map_rel → ⟨K,(V::('v×'v) set)⟩map_rel"
where "single_valued K" "single_valued (K¯)" "IS_BELOW_ID V"
proof goal_cases
case 1
have [param]: "((≤),(≤))∈Id→Id→bool_rel" by simp
from 1 show ?case
apply (parametricity add: param_and_cong1)
apply (auto simp: IS_BELOW_ID_def map_rel_def dest!: fun_relD)
done
qed
lemma IS_BELOW_ID_D: "(a,b)∈R ⟹ IS_BELOW_ID R ⟹ a=b" by (auto simp: IS_BELOW_ID_def)
sepref_decl_op pm_peek_min: "λm. SPEC (λ(k,v).
m k = Some v ∧ (∀k' v'. m k' = Some v' ⟶ prio v ≤ prio v'))"
:: "[Not o op_map_is_empty]⇩f ⟨K,V⟩map_rel → K×⇩r(V::('v×'v) set)"
where "IS_BELOW_ID V"
apply (rule frefI)
apply (intro nres_relI)
apply (clarsimp simp: pw_le_iff refine_pw_simps)
apply (rule map_rel_obtain2, assumption, assumption)
apply1 (intro exI conjI allI impI; assumption?)
proof -
fix x y k' v' b w
assume "(x, y) ∈ ⟨K, V⟩map_rel" "y k' = Some v'"
then obtain k v where "(k,k')∈K" "(v,v')∈V" "x k = Some v"
by (rule map_rel_obtain1)
assume "IS_BELOW_ID V" "(b, w) ∈ V"
with ‹(v,v')∈V› have [simp]: "b=w" "v=v'" by (auto simp: IS_BELOW_ID_def)
assume "∀k' v'. x k' = Some v' ⟶ prio b ≤ prio v'"
with ‹x k = Some v› show "prio w ≤ prio v'"
by auto
qed
sepref_decl_op pm_pop_min: "λm. SPEC (λ((k,v),m').
m k = Some v
∧ m' = op_map_delete k m
∧ (∀k' v'. m k' = Some v' ⟶ prio v ≤ prio v')
)" :: "[Not o op_map_is_empty]⇩f ⟨K,V⟩map_rel → (K×⇩r(V::('v×'v) set))×⇩r⟨K,V⟩map_rel"
where "single_valued K" "single_valued (K¯)" "IS_BELOW_ID V"
apply (rule frefI)
apply (intro nres_relI)
apply (clarsimp simp: pw_le_iff refine_pw_simps simp del: op_map_delete_def)
apply (rule map_rel_obtain2, assumption, assumption)
apply (intro exI conjI allI impI; assumption?)
applyS parametricity
proof -
fix x y k' v' b w
assume "(x, y) ∈ ⟨K, V⟩map_rel" "y k' = Some v'"
then obtain k v where "(k,k')∈K" "(v,v')∈V" "x k = Some v"
by (rule map_rel_obtain1)
assume "IS_BELOW_ID V" "(b, w) ∈ V"
with ‹(v,v')∈V› have [simp]: "b=w" "v=v'" by (auto simp: IS_BELOW_ID_def)
assume "∀k' v'. x k' = Some v' ⟶ prio b ≤ prio v'"
with ‹x k = Some v› show "prio w ≤ prio v'"
by auto
qed
end
end
Theory IICF_Abs_Heapmap
section ‹Priority Maps implemented with List and Map›
theory IICF_Abs_Heapmap
imports IICF_Abs_Heap "HOL-Library.Rewrite" "../../Intf/IICF_Prio_Map"
begin
type_synonym ('k,'v) ahm = "'k list × ('k ⇀ 'v)"
subsection ‹Basic Setup›
text ‹First, we define a mapping to list-based heaps›
definition hmr_α :: "('k,'v) ahm ⇒ 'v heap" where
"hmr_α ≡ λ(pq,m). map (the o m) pq"
definition "hmr_invar ≡ λ(pq,m). distinct pq ∧ dom m = set pq"
definition "hmr_rel ≡ br hmr_α hmr_invar"
lemmas hmr_rel_defs = hmr_rel_def br_def hmr_α_def hmr_invar_def
lemma hmr_empty_invar[simp]: "hmr_invar ([],Map.empty)"
by (auto simp: hmr_invar_def)
locale hmstruct = h: heapstruct prio for prio :: "'v ⇒ 'b::linorder"
begin
text ‹Next, we define a mapping to priority maps.›
definition heapmap_α :: "('k,'v) ahm ⇒ ('k ⇀ 'v)" where
"heapmap_α ≡ λ(pq,m). m"
definition heapmap_invar :: "('k,'v) ahm ⇒ bool" where
"heapmap_invar ≡ λhm. hmr_invar hm ∧ h.heap_invar (hmr_α hm)"
definition "heapmap_rel ≡ br heapmap_α heapmap_invar"
lemmas heapmap_rel_defs = heapmap_rel_def br_def heapmap_α_def heapmap_invar_def
lemma [refine_dref_RELATES]: "RELATES hmr_rel" by (simp add: RELATES_def)
lemma h_heap_invarI[simp]: "heapmap_invar hm ⟹ h.heap_invar (hmr_α hm)"
by (simp add: heapmap_invar_def)
lemma hmr_invarI[simp]: "heapmap_invar hm ⟹ hmr_invar hm"
unfolding heapmap_invar_def by blast
lemma set_hmr_α[simp]: "hmr_invar hm ⟹ set (hmr_α hm) = ran (heapmap_α hm)"
apply (clarsimp simp: hmr_α_def hmr_invar_def heapmap_α_def
eq_commute[of "dom _" "set _"] ran_def)
apply force
done
lemma in_h_hmr_α_conv[simp]: "hmr_invar hm ⟹ x ∈# h.α (hmr_α hm) ⟷ x ∈ ran (heapmap_α hm)"
by (force simp: hmr_α_def hmr_invar_def heapmap_α_def in_multiset_in_set ran_is_image)
subsection ‹Basic Operations›
text ‹In this section, we define the basic operations on heapmaps,
and their relations to heaps and maps.›
subsubsection ‹Length›
text ‹Length of the list that represents the heap›
definition hm_length :: "('k,'v) ahm ⇒ nat" where
"hm_length ≡ λ(pq,_). length pq"
lemma hm_length_refine: "(hm_length, length) ∈ hmr_rel → nat_rel"
apply (intro fun_relI)
unfolding hm_length_def
by (auto simp: hmr_rel_defs)
lemma hm_length_hmr_α[simp]: "length (hmr_α hm) = hm_length hm"
by (auto simp: hm_length_def hmr_α_def split: prod.splits)
lemmas [refine] = hm_length_refine[param_fo]
subsubsection ‹Valid›
text ‹Check whether index is valid›
definition "hm_valid hm i ≡ i>0 ∧ i≤ hm_length hm"
lemma hm_valid_refine: "(hm_valid,h.valid)∈hmr_rel → nat_rel → bool_rel"
apply (intro fun_relI)
unfolding hm_valid_def h.valid_def
by (parametricity add: hm_length_refine)
lemma hm_valid_hmr_α[simp]: "h.valid (hmr_α hm) = hm_valid hm"
by (intro ext) (auto simp: h.valid_def hm_valid_def)
subsubsection ‹Key-Of›
definition hm_key_of :: "('k,'v) ahm ⇒ nat ⇒ 'k" where
"hm_key_of ≡ λ(pq,m) i. pq!(i - 1)"
definition hm_key_of_op :: "('k,'v) ahm ⇒ nat ⇒ 'k nres" where
"hm_key_of_op ≡ λ(pq,m) i. ASSERT (i>0) ⪢ mop_list_get pq (i - 1)"
lemma hm_key_of_op_unfold:
shows "hm_key_of_op hm i = ASSERT (hm_valid hm i) ⪢ RETURN (hm_key_of hm i)"
unfolding hm_valid_def hm_length_def hm_key_of_op_def hm_key_of_def
by (auto split: prod.splits simp: pw_eq_iff refine_pw_simps)
lemma val_of_hmr_α[simp]: "hm_valid hm i ⟹ h.val_of (hmr_α hm) i
= the (heapmap_α hm (hm_key_of hm i))"
by (auto
simp: hmr_α_def h.val_of_def heapmap_α_def hm_key_of_def hm_valid_def hm_length_def
split: prod.splits)
lemma hm_α_key_ex[simp]:
"⟦hmr_invar hm; hm_valid hm i⟧ ⟹ (heapmap_α hm (hm_key_of hm i) ≠ None)"
unfolding heapmap_invar_def hmr_invar_def hm_valid_def heapmap_α_def
hm_key_of_def hm_length_def
by (auto split: prod.splits)
subsubsection ‹Lookup›
abbreviation (input) hm_lookup where "hm_lookup ≡ heapmap_α"
definition "hm_the_lookup_op hm k ≡
ASSERT (heapmap_α hm k ≠ None ∧ hmr_invar hm)
⪢ RETURN (the (heapmap_α hm k))"
subsubsection ‹Exchange›
text ‹Exchange two indices›
definition "hm_exch_op ≡ λ(pq,m) i j. do {
ASSERT (hm_valid (pq,m) i);
ASSERT (hm_valid (pq,m) j);
ASSERT (hmr_invar (pq,m));
pq ← mop_list_swap pq (i - 1) (j - 1);
RETURN (pq,m)
}"
lemma hm_exch_op_invar: "hm_exch_op hm i j ≤⇩n SPEC hmr_invar"
unfolding hm_exch_op_def h.exch_op_def h.val_of_op_def h.update_op_def
apply simp
apply refine_vcg
apply (auto simp: hm_valid_def map_swap hm_length_def hmr_rel_defs)
done
lemma hm_exch_op_refine: "(hm_exch_op,h.exch_op) ∈ hmr_rel → nat_rel → nat_rel → ⟨hmr_rel⟩nres_rel"
apply (intro fun_relI nres_relI)
unfolding hm_exch_op_def h.exch_op_def h.val_of_op_def h.update_op_def
apply simp
apply refine_vcg
apply (auto simp: hm_valid_def map_swap hm_length_def hmr_rel_defs)
done
lemmas hm_exch_op_refine'[refine] = hm_exch_op_refine[param_fo, THEN nres_relD]
definition hm_exch :: "('k,'v) ahm ⇒ nat ⇒ nat ⇒ ('k,'v) ahm"
where "hm_exch ≡ λ(pq,m) i j. (swap pq (i-1) (j-1),m)"
lemma hm_exch_op_α_correct: "hm_exch_op hm i j ≤⇩n SPEC (λhm'.
hm_valid hm i ∧ hm_valid hm j ∧ hm'=hm_exch hm i j
)"
unfolding hm_exch_op_def
apply refine_vcg
apply (vc_solve simp: hm_valid_def hm_length_def heapmap_α_def solve: asm_rl)
apply (auto simp add: hm_key_of_def hm_exch_def swap_def) []
done
lemma hm_exch_α[simp]: "heapmap_α (hm_exch hm i j) = (heapmap_α hm)"
by (auto simp: heapmap_α_def hm_exch_def split: prod.splits)
lemma hm_exch_valid[simp]: "hm_valid (hm_exch hm i j) = hm_valid hm"
by (intro ext) (auto simp: hm_valid_def hm_length_def hm_exch_def split: prod.splits)
lemma hm_exch_length[simp]: "hm_length (hm_exch hm i j) = hm_length hm"
by (auto simp: hm_length_def hm_exch_def split: prod.splits)
lemma hm_exch_same[simp]: "hm_exch hm i i = hm"
by (auto simp: hm_exch_def split: prod.splits)
lemma hm_key_of_exch_conv[simp]:
"⟦hm_valid hm i; hm_valid hm j; hm_valid hm k⟧ ⟹
hm_key_of (hm_exch hm i j) k = (
if k=i then hm_key_of hm j
else if k=j then hm_key_of hm i
else hm_key_of hm k
)"
unfolding hm_exch_def hm_valid_def hm_length_def hm_key_of_def
by (auto split: prod.splits)
lemma hm_key_of_exch_matching[simp]:
"⟦hm_valid hm i; hm_valid hm j⟧ ⟹ hm_key_of (hm_exch hm i j) i = hm_key_of hm j"
"⟦hm_valid hm i; hm_valid hm j⟧ ⟹ hm_key_of (hm_exch hm i j) j = hm_key_of hm i"
by simp_all
subsubsection ‹Index›
text ‹Obtaining the index of a key›
definition "hm_index ≡ λ(pq,m) k. index pq k + 1"
lemma hm_index_valid[simp]: "⟦hmr_invar hm; heapmap_α hm k ≠ None⟧ ⟹ hm_valid hm (hm_index hm k)"
by (force simp: hm_valid_def heapmap_α_def hmr_invar_def hm_index_def hm_length_def Suc_le_eq)
lemma hm_index_key_of[simp]: "⟦hmr_invar hm; heapmap_α hm k ≠ None⟧ ⟹ hm_key_of hm (hm_index hm k) = k"
by (force
simp: hm_valid_def heapmap_α_def hmr_invar_def hm_index_def hm_length_def hm_key_of_def Suc_le_eq)
definition "hm_index_op ≡ λ(pq,m) k.
do {
ASSERT (hmr_invar (pq,m) ∧ heapmap_α (pq,m) k ≠ None);
i ← mop_list_index pq k;
RETURN (i+1)
}"
lemma hm_index_op_correct:
assumes "hmr_invar hm"
assumes "heapmap_α hm k ≠ None"
shows "hm_index_op hm k ≤ SPEC (λr. r= hm_index hm k)"
using assms unfolding hm_index_op_def
apply refine_vcg
apply (auto simp: heapmap_α_def hmr_invar_def hm_index_def index_nth_id)
done
lemmas [refine_vcg] = hm_index_op_correct
subsubsection ‹Update›
text ‹Updating the heap at an index›
definition hm_update_op :: "('k,'v) ahm ⇒ nat ⇒ 'v ⇒ ('k,'v) ahm nres" where
"hm_update_op ≡ λ(pq,m) i v. do {
ASSERT (hm_valid (pq,m) i ∧ hmr_invar (pq,m));
k ← mop_list_get pq (i - 1);
RETURN (pq, m(k ↦ v))
}"
lemma hm_update_op_invar: "hm_update_op hm k v ≤⇩n SPEC hmr_invar"
unfolding hm_update_op_def h.update_op_def
apply refine_vcg
by (auto simp: hmr_rel_defs map_distinct_upd_conv hm_valid_def hm_length_def)
lemma hm_update_op_refine: "(hm_update_op, h.update_op) ∈ hmr_rel → nat_rel → Id → ⟨hmr_rel⟩nres_rel"
apply (intro fun_relI nres_relI)
unfolding hm_update_op_def h.update_op_def mop_list_get_alt mop_list_set_alt
apply refine_vcg
apply (auto simp: hmr_rel_defs map_distinct_upd_conv hm_valid_def hm_length_def)
done
lemmas [refine] = hm_update_op_refine[param_fo, THEN nres_relD]
lemma hm_update_op_α_correct:
assumes "hmr_invar hm"
assumes "heapmap_α hm k ≠ None"
shows "hm_update_op hm (hm_index hm k) v ≤⇩n SPEC (λhm'. heapmap_α hm' = (heapmap_α hm)(k↦v))"
using assms
unfolding hm_update_op_def
apply refine_vcg
apply (force simp: heapmap_rel_defs hmr_rel_defs hm_index_def)
done
subsubsection ‹Butlast›
text ‹Remove last element›
definition hm_butlast_op :: "('k,'v) ahm ⇒ ('k,'v) ahm nres" where
"hm_butlast_op ≡ λ(pq,m). do {
ASSERT (hmr_invar (pq,m));
k ← mop_list_get pq (length pq - 1);
pq ← mop_list_butlast pq;
let m = m(k:=None);
RETURN (pq,m)
}"
lemma hm_butlast_op_refine: "(hm_butlast_op, h.butlast_op) ∈ hmr_rel → ⟨hmr_rel⟩nres_rel"
supply [simp del] = map_upd_eq_restrict
apply (intro fun_relI nres_relI)
unfolding hm_butlast_op_def h.butlast_op_def
apply simp
apply refine_vcg
apply (clarsimp_all simp: hmr_rel_defs map_butlast distinct_butlast)
apply (auto simp: neq_Nil_rev_conv) []
done
lemmas [refine] = hm_butlast_op_refine[param_fo, THEN nres_relD]
lemma hm_butlast_op_α_correct: "hm_butlast_op hm ≤⇩n SPEC (
λhm'. heapmap_α hm' = (heapmap_α hm)( hm_key_of hm (hm_length hm) := None ))"
unfolding hm_butlast_op_def
apply refine_vcg
apply (auto simp: heapmap_α_def hm_key_of_def hm_length_def)
done
subsubsection ‹Append›
text ‹Append new element at end of heap›
definition hm_append_op :: "('k,'v) ahm ⇒ 'k ⇒ 'v ⇒ ('k,'v) ahm nres"
where "hm_append_op ≡ λ(pq,m) k v. do {
ASSERT (k ∉ dom m);
ASSERT (hmr_invar (pq,m));
pq ← mop_list_append pq k;
let m = m (k ↦ v);
RETURN (pq,m)
}"
lemma hm_append_op_invar: "hm_append_op hm k v ≤⇩n SPEC hmr_invar"
unfolding hm_append_op_def h.append_op_def
apply refine_vcg
unfolding heapmap_α_def hmr_rel_defs
apply (auto simp: )
done
lemma hm_append_op_refine: "⟦ heapmap_α hm k = None; (hm,h)∈hmr_rel ⟧
⟹ (hm_append_op hm k v, h.append_op h v) ∈ ⟨hmr_rel⟩nres_rel"
apply (intro fun_relI nres_relI)
unfolding hm_append_op_def h.append_op_def
apply refine_vcg
unfolding heapmap_α_def hmr_rel_defs
apply (auto simp: )
done
lemmas hm_append_op_refine'[refine] = hm_append_op_refine[param_fo, THEN nres_relD]
lemma hm_append_op_α_correct:
"hm_append_op hm k v ≤⇩n SPEC (λhm'. heapmap_α hm' = (heapmap_α hm) (k ↦ v))"
unfolding hm_append_op_def
apply refine_vcg
by (auto simp: heapmap_α_def)
subsection ‹Auxiliary Operations›
text ‹Auxiliary operations on heapmaps, which are derived
from the basic operations, but do not correspond to
operations of the priority map interface›
text ‹We start with some setup›
lemma heapmap_hmr_relI: "(hm,h)∈heapmap_rel ⟹ (hm,hmr_α hm) ∈ hmr_rel"
by (auto simp: heapmap_rel_defs hmr_rel_defs)
lemma heapmap_hmr_relI': "heapmap_invar hm ⟹ (hm,hmr_α hm) ∈ hmr_rel"
by (auto simp: heapmap_rel_defs hmr_rel_defs)
text ‹The basic principle how we prove correctness of our operations:
Invariant preservation is shown by relating the operations to
operations on heaps. Then, only correctness on the abstraction
remains to be shown, assuming the operation does not fail.
›
lemma heapmap_nres_relI':
assumes "hm ≤ ⇓hmr_rel h'"
assumes "h' ≤ SPEC (h.heap_invar)"
assumes "hm ≤⇩n SPEC (λhm'. RETURN (heapmap_α hm') ≤ h)"
shows "hm ≤ ⇓heapmap_rel h"
using assms
unfolding heapmap_rel_defs hmr_rel_def
by (auto simp: pw_le_iff pw_leof_iff refine_pw_simps)
lemma heapmap_nres_relI'':
assumes "hm ≤ ⇓hmr_rel h'"
assumes "h' ≤ SPEC Φ"
assumes "⋀h'. Φ h' ⟹ h.heap_invar h'"
assumes "hm ≤⇩n SPEC (λhm'. RETURN (heapmap_α hm') ≤ h)"
shows "hm ≤ ⇓heapmap_rel h"
apply (rule heapmap_nres_relI')
apply fact
apply (rule order_trans, fact)
apply (clarsimp; fact)
apply fact
done
subsubsection ‹Val-of›
text ‹Indexing into the heap›
definition hm_val_of_op :: "('k,'v) ahm ⇒ nat ⇒ 'v nres" where
"hm_val_of_op ≡ λhm i. do {
k ← hm_key_of_op hm i;
v ← hm_the_lookup_op hm k;
RETURN v
}"
lemma hm_val_of_op_refine: "(hm_val_of_op,h.val_of_op) ∈ (hmr_rel → nat_rel → ⟨Id⟩nres_rel)"
apply (intro fun_relI nres_relI)
unfolding hm_val_of_op_def h.val_of_op_def
hm_key_of_op_def hm_key_of_def hm_valid_def hm_length_def
hm_the_lookup_op_def
apply clarsimp
apply (rule refine_IdD)
apply refine_vcg
apply (auto simp: hmr_rel_defs heapmap_α_def)
done
lemmas [refine] = hm_val_of_op_refine[param_fo, THEN nres_relD]
subsubsection ‹Prio-of›
text ‹Priority of key›
definition "hm_prio_of_op h i ≡ do {v ← hm_val_of_op h i; RETURN (prio v)}"
lemma hm_prio_of_op_refine: "(hm_prio_of_op, h.prio_of_op) ∈ hmr_rel → nat_rel → ⟨Id⟩nres_rel"
apply (intro fun_relI nres_relI)
unfolding hm_prio_of_op_def h.prio_of_op_def
apply refine_rcg
by auto
lemmas hm_prio_of_op_refine'[refine] = hm_prio_of_op_refine[param_fo, THEN nres_relD]
subsubsection ‹Swim›
definition hm_swim_op :: "('k,'v) ahm ⇒ nat ⇒ ('k,'v) ahm nres" where
"hm_swim_op h i ≡ do {
RECT (λswim (h,i). do {
ASSERT (hm_valid h i ∧ h.swim_invar (hmr_α h) i);
if hm_valid h (h.parent i) then do {
ppi ← hm_prio_of_op h (h.parent i);
pi ← hm_prio_of_op h i;
if (¬ppi ≤ pi) then do {
h ← hm_exch_op h i (h.parent i);
swim (h, h.parent i)
} else
RETURN h
} else
RETURN h
}) (h,i)
}"
lemma hm_swim_op_refine: "(hm_swim_op, h.swim_op) ∈ hmr_rel → nat_rel → ⟨hmr_rel⟩nres_rel"
apply (intro fun_relI nres_relI)
unfolding hm_swim_op_def h.swim_op_def
apply refine_rcg
apply refine_dref_type
apply (clarsimp_all simp: hm_valid_refine[param_fo, THEN IdD])
apply (simp add: hmr_rel_def in_br_conv)
done
lemmas hm_swim_op_refine'[refine] = hm_swim_op_refine[param_fo, THEN nres_relD]
lemma hm_swim_op_nofail_imp_valid:
"nofail (hm_swim_op hm i) ⟹ hm_valid hm i ∧ h.swim_invar (hmr_α hm) i"
unfolding hm_swim_op_def
apply (subst (asm) RECT_unfold, refine_mono)
by (auto simp: refine_pw_simps)
lemma hm_swim_op_α_correct: "hm_swim_op hm i ≤⇩n SPEC (λhm'. heapmap_α hm' = heapmap_α hm)"
apply (rule leof_add_nofailI)
apply (drule hm_swim_op_nofail_imp_valid)
unfolding hm_swim_op_def
apply (rule RECT_rule_leof[where
pre="λ(hm',i). hm_valid hm' i ∧ heapmap_α hm' = heapmap_α hm"
and V = "inv_image less_than snd"
])
apply simp
apply simp
unfolding hm_prio_of_op_def hm_val_of_op_def
hm_exch_op_def hm_key_of_op_def hm_the_lookup_op_def
apply (refine_vcg)
apply (vc_solve simp add: hm_valid_def hm_length_def)
apply rprems
apply (vc_solve simp: heapmap_α_def h.parent_def)
done
subsubsection ‹Sink›
definition hm_sink_op
where
"hm_sink_op h k ≡ RECT (λD (h,k). do {
ASSERT (k>0 ∧ k≤hm_length h);
let len = hm_length h;
if (2*k ≤ len) then do {
let j = 2*k;
pj ← hm_prio_of_op h j;
j ← (
if j<len then do {
psj ← hm_prio_of_op h (Suc j);
if pj>psj then RETURN (j+1) else RETURN j
} else RETURN j);
pj ← hm_prio_of_op h j;
pk ← hm_prio_of_op h k;
if (pk > pj) then do {
h ← hm_exch_op h k j;
D (h,j)
} else
RETURN h
} else RETURN h
}) (h,k)"
lemma hm_sink_op_refine: "(hm_sink_op, h.sink_op) ∈ hmr_rel → nat_rel → ⟨hmr_rel⟩nres_rel"
apply (intro fun_relI nres_relI)
unfolding hm_sink_op_def h.sink_op_opt_eq[symmetric] h.sink_op_opt_def
apply refine_rcg
apply refine_dref_type
unfolding hmr_rel_def heapmap_rel_def
apply (clarsimp_all simp: in_br_conv)
done
lemmas hm_sink_op_refine'[refine] = hm_sink_op_refine[param_fo, THEN nres_relD]
lemma hm_sink_op_nofail_imp_valid: "nofail (hm_sink_op hm i) ⟹ hm_valid hm i"
unfolding hm_sink_op_def
apply (subst (asm) RECT_unfold, refine_mono)
by (auto simp: refine_pw_simps hm_valid_def)
lemma hm_sink_op_α_correct: "hm_sink_op hm i ≤⇩n SPEC (λhm'. heapmap_α hm' = heapmap_α hm)"
apply (rule leof_add_nofailI)
apply (drule hm_sink_op_nofail_imp_valid)
unfolding hm_sink_op_def
apply (rule RECT_rule_leof[where
pre="λ(hm',i). hm_valid hm' i ∧ heapmap_α hm' = heapmap_α hm ∧ hm_length hm' = hm_length hm"
and V = "measure (λ(hm',i). hm_length hm' - i)"
])
apply simp
apply simp
unfolding hm_prio_of_op_def hm_val_of_op_def hm_exch_op_def
hm_key_of_op_def hm_the_lookup_op_def
apply (refine_vcg)
apply (vc_solve simp add: hm_valid_def hm_length_def)
apply rprems
apply (vc_solve simp: heapmap_α_def h.parent_def split: prod.splits)
apply (auto)
done
subsubsection ‹Repair›
definition "hm_repair_op hm i ≡ do {
hm ← hm_sink_op hm i;
hm ← hm_swim_op hm i;
RETURN hm
}"
lemma hm_repair_op_refine: "(hm_repair_op, h.repair_op) ∈ hmr_rel → nat_rel → ⟨hmr_rel⟩nres_rel"
apply (intro fun_relI nres_relI)
unfolding hm_repair_op_def h.repair_op_def
by refine_rcg
lemmas hm_repair_op_refine'[refine] = hm_repair_op_refine[param_fo, THEN nres_relD]
lemma hm_repair_op_α_correct: "hm_repair_op hm i ≤⇩n SPEC (λhm'. heapmap_α hm' = heapmap_α hm)"
unfolding hm_repair_op_def
apply (refine_vcg
hm_swim_op_α_correct[THEN leof_trans]
hm_sink_op_α_correct[THEN leof_trans])
by auto
subsection ‹Operations›
text ‹In this section, we define the operations that implement the priority-map interface›
subsubsection ‹Empty›
definition hm_empty_op :: "('k,'v) ahm nres"
where "hm_empty_op ≡ RETURN ([],Map.empty)"
lemma hm_empty_aref: "(hm_empty_op,RETURN op_map_empty) ∈ ⟨heapmap_rel⟩nres_rel"
unfolding hm_empty_op_def
by (auto simp: heapmap_rel_defs hmr_rel_defs intro: nres_relI)
subsubsection ‹Insert›
definition hm_insert_op :: "'k ⇒ 'v ⇒ ('k,'v) ahm ⇒ ('k,'v) ahm nres" where
"hm_insert_op ≡ λk v h. do {
ASSERT (h.heap_invar (hmr_α h));
h ← hm_append_op h k v;
let l = hm_length h;
h ← hm_swim_op h l;
RETURN h
}"
lemma hm_insert_op_refine[refine]: "⟦ heapmap_α hm k = None; (hm,h)∈hmr_rel ⟧ ⟹
hm_insert_op k v hm ≤ ⇓hmr_rel (h.insert_op v h)"
unfolding hm_insert_op_def h.insert_op_def
apply refine_rcg
by (auto simp: hmr_rel_def br_def)
lemma hm_insert_op_aref:
"(hm_insert_op,mop_map_update_new) ∈ Id → Id → heapmap_rel → ⟨heapmap_rel⟩nres_rel"
apply (intro fun_relI nres_relI)
unfolding mop_map_update_new_alt
apply (rule ASSERT_refine_right)
apply (rule heapmap_nres_relI''[OF hm_insert_op_refine h.insert_op_correct])
apply (unfold heapmap_rel_def in_br_conv; clarsimp)
apply (erule heapmap_hmr_relI)
apply (unfold heapmap_rel_def in_br_conv; clarsimp)
apply (unfold heapmap_rel_def in_br_conv; clarsimp)
unfolding hm_insert_op_def
apply (refine_vcg
hm_append_op_α_correct[THEN leof_trans]
hm_swim_op_α_correct[THEN leof_trans])
apply (unfold heapmap_rel_def in_br_conv; clarsimp)
done
subsubsection ‹Is-Empty›
lemma hmr_α_empty_iff[simp]:
"hmr_invar hm ⟹ hmr_α hm = [] ⟷ heapmap_α hm = Map.empty"
by (auto
simp: hmr_α_def heapmap_invar_def heapmap_α_def hmr_invar_def
split: prod.split)
definition hm_is_empty_op :: "('k,'v) ahm ⇒ bool nres" where
"hm_is_empty_op ≡ λhm. do {
ASSERT (hmr_invar hm);
let l = hm_length hm;
RETURN (l=0)
}"
lemma hm_is_empty_op_refine: "(hm_is_empty_op, h.is_empty_op) ∈ hmr_rel → ⟨bool_rel⟩nres_rel"
apply (intro fun_relI nres_relI)
unfolding hm_is_empty_op_def h.is_empty_op_def
apply refine_rcg
apply (auto simp: hmr_rel_defs) []
apply (parametricity add: hm_length_refine)
done
lemma hm_is_empty_op_aref: "(hm_is_empty_op, RETURN o op_map_is_empty) ∈ heapmap_rel → ⟨bool_rel⟩nres_rel"
apply (intro fun_relI nres_relI)
unfolding hm_is_empty_op_def
apply refine_vcg
apply (auto simp: hmr_rel_defs heapmap_rel_defs hm_length_def)
done
subsubsection ‹Lookup›
definition hm_lookup_op :: "'k ⇒ ('k,'v) ahm ⇒ 'v option nres"
where "hm_lookup_op ≡ λk hm. ASSERT (heapmap_invar hm) ⪢ RETURN (hm_lookup hm k)"
lemma hm_lookup_op_aref: "(hm_lookup_op,RETURN oo op_map_lookup) ∈ Id → heapmap_rel → ⟨⟨Id⟩option_rel⟩nres_rel"
apply (intro fun_relI nres_relI)
unfolding hm_lookup_op_def heapmap_rel_def in_br_conv
apply refine_vcg
apply simp_all
done
subsubsection ‹Contains-Key›
definition "hm_contains_key_op ≡ λk (pq,m). ASSERT (heapmap_invar (pq,m)) ⪢ RETURN (k∈dom m)"
lemma hm_contains_key_op_aref: "(hm_contains_key_op,RETURN oo op_map_contains_key) ∈ Id → heapmap_rel → ⟨bool_rel⟩nres_rel"
apply (intro fun_relI nres_relI)
unfolding hm_contains_key_op_def heapmap_rel_defs
apply refine_vcg
by (auto)
subsubsection ‹Decrease-Key›
definition "hm_decrease_key_op ≡ λk v hm. do {
ASSERT (heapmap_invar hm);
ASSERT (heapmap_α hm k ≠ None ∧ prio v ≤ prio (the (heapmap_α hm k)));
i ← hm_index_op hm k;
hm ← hm_update_op hm i v;
hm_swim_op hm i
}"
definition (in heapstruct) "decrease_key_op i v h ≡ do {
ASSERT (valid h i ∧ prio v ≤ prio_of h i);
h ← update_op h i v;
swim_op h i
}"
lemma (in heapstruct) decrease_key_op_invar:
"⟦heap_invar h; valid h i; prio v ≤ prio_of h i⟧ ⟹ decrease_key_op i v h ≤ SPEC heap_invar"
unfolding decrease_key_op_def
apply refine_vcg
by (auto simp: swim_invar_decr)
lemma index_op_inline_refine:
assumes "heapmap_invar hm"
assumes "heapmap_α hm k ≠ None"
assumes "f (hm_index hm k) ≤ m"
shows "do {i ← hm_index_op hm k; f i} ≤ m"
using hm_index_op_correct[of hm k] assms
by (auto simp: pw_le_iff refine_pw_simps)
lemma hm_decrease_key_op_refine:
"⟦(hm,h)∈hmr_rel; (hm,m)∈heapmap_rel; m k = Some v'⟧
⟹ hm_decrease_key_op k v hm ≤⇓hmr_rel (h.decrease_key_op (hm_index hm k) v h)"
unfolding hm_decrease_key_op_def h.decrease_key_op_def
apply (refine_rcg index_op_inline_refine)
unfolding hmr_rel_def heapmap_rel_def in_br_conv
apply (clarsimp_all)
done
lemma hm_index_op_inline_leof:
assumes "f (hm_index hm k) ≤⇩n m"
shows "do {i ← hm_index_op hm k; f i} ≤⇩n m"
using hm_index_op_correct[of hm k] assms unfolding hm_index_op_def
by (auto simp: pw_le_iff pw_leof_iff refine_pw_simps split: prod.splits)
lemma hm_decrease_key_op_α_correct:
"heapmap_invar hm ⟹ hm_decrease_key_op k v hm ≤⇩n SPEC (λhm'. heapmap_α hm' = heapmap_α hm(k↦v))"
unfolding hm_decrease_key_op_def
apply (refine_vcg
hm_update_op_α_correct[THEN leof_trans]
hm_swim_op_α_correct[THEN leof_trans]
hm_index_op_inline_leof
)
apply simp_all
done
lemma hm_decrease_key_op_aref:
"(hm_decrease_key_op, PR_CONST (mop_pm_decrease_key prio)) ∈ Id → Id → heapmap_rel → ⟨heapmap_rel⟩nres_rel"
unfolding PR_CONST_def
apply (intro fun_relI nres_relI)
apply (frule heapmap_hmr_relI)
unfolding mop_pm_decrease_key_alt
apply (rule ASSERT_refine_right; clarsimp)
apply (rule heapmap_nres_relI')
apply (rule hm_decrease_key_op_refine; assumption)
unfolding heapmap_rel_def hmr_rel_def in_br_conv
apply (rule h.decrease_key_op_invar; simp; fail )
apply (refine_vcg hm_decrease_key_op_α_correct[THEN leof_trans]; simp; fail)
done
subsubsection ‹Increase-Key›
definition "hm_increase_key_op ≡ λk v hm. do {
ASSERT (heapmap_invar hm);
ASSERT (heapmap_α hm k ≠ None ∧ prio v ≥ prio (the (heapmap_α hm k)));
i ← hm_index_op hm k;
hm ← hm_update_op hm i v;
hm_sink_op hm i
}"
definition (in heapstruct) "increase_key_op i v h ≡ do {
ASSERT (valid h i ∧ prio v ≥ prio_of h i);
h ← update_op h i v;
sink_op h i
}"
lemma (in heapstruct) increase_key_op_invar:
"⟦heap_invar h; valid h i; prio v ≥ prio_of h i⟧ ⟹ increase_key_op i v h ≤ SPEC heap_invar"
unfolding increase_key_op_def
apply refine_vcg
by (auto simp: sink_invar_incr)
lemma hm_increase_key_op_refine:
"⟦(hm,h)∈hmr_rel; (hm,m)∈heapmap_rel; m k = Some v'⟧
⟹ hm_increase_key_op k v hm ≤⇓hmr_rel (h.increase_key_op (hm_index hm k) v h)"
unfolding hm_increase_key_op_def h.increase_key_op_def
apply (refine_rcg index_op_inline_refine)
unfolding hmr_rel_def heapmap_rel_def in_br_conv
apply (clarsimp_all)
done
lemma hm_increase_key_op_α_correct:
"heapmap_invar hm ⟹ hm_increase_key_op k v hm ≤⇩n SPEC (λhm'. heapmap_α hm' = heapmap_α hm(k↦v))"
unfolding hm_increase_key_op_def
apply (refine_vcg
hm_update_op_α_correct[THEN leof_trans]
hm_sink_op_α_correct[THEN leof_trans]
hm_index_op_inline_leof)
apply simp_all
done
lemma hm_increase_key_op_aref:
"(hm_increase_key_op, PR_CONST (mop_pm_increase_key prio)) ∈ Id → Id → heapmap_rel → ⟨heapmap_rel⟩nres_rel"
unfolding PR_CONST_def
apply (intro fun_relI nres_relI)
apply (frule heapmap_hmr_relI)
unfolding mop_pm_increase_key_alt
apply (rule ASSERT_refine_right; clarsimp)
apply (rule heapmap_nres_relI')
apply (rule hm_increase_key_op_refine; assumption)
unfolding heapmap_rel_def hmr_rel_def in_br_conv
apply (rule h.increase_key_op_invar; simp; fail )
apply (refine_vcg hm_increase_key_op_α_correct[THEN leof_trans]; simp)
done
subsubsection ‹Change-Key›
definition "hm_change_key_op ≡ λk v hm. do {
ASSERT (heapmap_invar hm);
ASSERT (heapmap_α hm k ≠ None);
i ← hm_index_op hm k;
hm ← hm_update_op hm i v;
hm_repair_op hm i
}"
definition (in heapstruct) "change_key_op i v h ≡ do {
ASSERT (valid h i);
h ← update_op h i v;
repair_op h i
}"
lemma (in heapstruct) change_key_op_invar:
"⟦heap_invar h; valid h i⟧ ⟹ change_key_op i v h ≤ SPEC heap_invar"
unfolding change_key_op_def
apply (refine_vcg)
apply hypsubst
apply refine_vcg
by (auto simp: sink_invar_incr)
lemma hm_change_key_op_refine:
"⟦(hm,h)∈hmr_rel; (hm,m)∈heapmap_rel; m k = Some v'⟧
⟹ hm_change_key_op k v hm ≤⇓hmr_rel (h.change_key_op (hm_index hm k) v h)"
unfolding hm_change_key_op_def h.change_key_op_def
apply (refine_rcg index_op_inline_refine)
unfolding hmr_rel_def heapmap_rel_def in_br_conv
apply (clarsimp_all)
done
lemma hm_change_key_op_α_correct:
"heapmap_invar hm ⟹ hm_change_key_op k v hm ≤⇩n SPEC (λhm'. heapmap_α hm' = heapmap_α hm(k↦v))"
unfolding hm_change_key_op_def
apply (refine_vcg
hm_update_op_α_correct[THEN leof_trans]
hm_repair_op_α_correct[THEN leof_trans]
hm_index_op_inline_leof)
unfolding heapmap_rel_def in_br_conv
apply simp
apply simp
done
lemma hm_change_key_op_aref:
"(hm_change_key_op, mop_map_update_ex) ∈ Id → Id → heapmap_rel → ⟨heapmap_rel⟩nres_rel"
apply (intro fun_relI nres_relI)
apply (frule heapmap_hmr_relI)
unfolding mop_map_update_ex_alt
apply (rule ASSERT_refine_right; clarsimp)
apply (rule heapmap_nres_relI')
apply (rule hm_change_key_op_refine; assumption)
unfolding heapmap_rel_def hmr_rel_def in_br_conv
apply (rule h.change_key_op_invar; simp; fail )
apply ((refine_vcg hm_change_key_op_α_correct[THEN leof_trans]; simp))
done
subsubsection ‹Set›
text ‹Realized as generic algorithm!›
lemma (in -) op_pm_set_gen_impl: "RETURN ooo op_map_update = (λk v m. do {
c ← RETURN (op_map_contains_key k m);
if c then
mop_map_update_ex k v m
else
mop_map_update_new k v m
})"
apply (intro ext)
unfolding op_map_contains_key_def mop_map_update_ex_def mop_map_update_new_def
by simp
definition "hm_set_op k v hm ≡ do {
c ← hm_contains_key_op k hm;
if c then
hm_change_key_op k v hm
else
hm_insert_op k v hm
}"
lemma hm_set_op_aref:
"(hm_set_op, RETURN ooo op_map_update) ∈ Id → Id → heapmap_rel → ⟨heapmap_rel⟩nres_rel"
unfolding op_pm_set_gen_impl
apply (intro fun_relI nres_relI)
unfolding hm_set_op_def o_def
apply (refine_rcg
hm_contains_key_op_aref[param_fo, unfolded o_def, THEN nres_relD]
hm_change_key_op_aref[param_fo, THEN nres_relD]
hm_insert_op_aref[param_fo, THEN nres_relD]
)
by auto
subsubsection ‹Pop-Min›
definition hm_pop_min_op :: "('k,'v) ahm ⇒ (('k×'v) × ('k,'v) ahm) nres" where
"hm_pop_min_op hm ≡ do {
ASSERT (heapmap_invar hm);
ASSERT (hm_valid hm 1);
k ← hm_key_of_op hm 1;
v ← hm_the_lookup_op hm k;
let l = hm_length hm;
hm ← hm_exch_op hm 1 l;
hm ← hm_butlast_op hm;
if (l≠1) then do {
hm ← hm_sink_op hm 1;
RETURN ((k,v),hm)
} else RETURN ((k,v),hm)
}"
lemma hm_pop_min_op_refine:
"(hm_pop_min_op, h.pop_min_op) ∈ hmr_rel → ⟨UNIV ×⇩r hmr_rel⟩nres_rel"
apply (intro fun_relI nres_relI)
unfolding hm_pop_min_op_def h.pop_min_op_def
unfolding ignore_snd_refine_conv hm_the_lookup_op_def hm_key_of_op_unfold
apply (simp cong: if_cong add: Let_def)
apply (simp add: unused_bind_conv h.val_of_op_def refine_pw_simps)
apply refine_rcg
unfolding hmr_rel_def in_br_conv
apply (unfold heapmap_invar_def;simp)
apply (auto simp: in_br_conv)
done
text ‹We demonstrate two different approaches for proving correctness
here.
The first approach uses the relation to plain heaps only to establish
the invariant.
The second approach also uses the relation to heaps to establish
correctness of the result.
The first approach seems to be more robust against badly set
up simpsets, which may be the case in early stages of development.
Assuming a working simpset, the second approach may be less work,
and the proof may look more elegant.
›
text_raw ‹\paragraph{First approach}›
text ‹Transfer heapmin-property to heapmap-domain›
lemma heapmap_min_prop:
assumes INV: "heapmap_invar hm"
assumes V': "heapmap_α hm k = Some v'"
assumes NE: "hm_valid hm (Suc 0)"
shows "prio (the (heapmap_α hm (hm_key_of hm (Suc 0)))) ≤ prio v'"
proof -
obtain pq m where [simp]: "hm=(pq,m)" by (cases hm)
from NE have [simp]: "pq≠[]" by (auto simp: hm_valid_def hm_length_def)
have CNV_LHS: "prio (the (heapmap_α hm (hm_key_of hm (Suc 0))))
= h.prio_of (hmr_α hm) (Suc 0)"
by (auto simp: heapmap_α_def hm_key_of_def hmr_α_def h.val_of_def)
from INV have INV': "h.heap_invar (hmr_α hm)"
unfolding heapmap_invar_def by auto
from V' INV obtain i where IDX: "h.valid (hmr_α hm) i"
and CNV_RHS: "prio v' = h.prio_of (hmr_α hm) i"
apply (clarsimp simp: heapmap_α_def heapmap_invar_def hmr_invar_def hmr_α_def
h.valid_def h.val_of_def)
by (metis (no_types, hide_lams) Suc_leI comp_apply diff_Suc_Suc
diff_zero domI index_less_size_conv neq0_conv nth_index nth_map
old.nat.distinct(2) option.sel)
from h.heap_min_prop[OF INV' IDX] show ?thesis
unfolding CNV_LHS CNV_RHS .
qed
text ‹With the above lemma, the correctness proof is straightforward›
lemma hm_pop_min_α_correct: "hm_pop_min_op hm ≤⇩n SPEC (λ((k,v),hm').
heapmap_α hm k = Some v
∧ heapmap_α hm' = (heapmap_α hm)(k:=None)
∧ (∀k' v'. heapmap_α hm k' = Some v' ⟶ prio v ≤ prio v'))"
unfolding hm_pop_min_op_def hm_key_of_op_unfold hm_the_lookup_op_def
apply (refine_vcg
hm_exch_op_α_correct[THEN leof_trans]
hm_butlast_op_α_correct[THEN leof_trans]
hm_sink_op_α_correct[THEN leof_trans]
)
apply (auto simp: heapmap_min_prop)
done
lemma heapmap_nres_rel_prodI:
assumes "hmx ≤ ⇓(UNIV ×⇩r hmr_rel) h'x"
assumes "h'x ≤ SPEC (λ(_,h'). h.heap_invar h')"
assumes "hmx ≤⇩n SPEC (λ(r,hm'). RETURN (r,heapmap_α hm') ≤ ⇓(R×⇩rId) hx)"
shows "hmx ≤ ⇓(R×⇩rheapmap_rel) hx"
using assms
unfolding heapmap_rel_def hmr_rel_def br_def heapmap_invar_def
apply (auto simp: pw_le_iff pw_leof_iff refine_pw_simps; blast)
done
lemma hm_pop_min_op_aref: "(hm_pop_min_op, PR_CONST (mop_pm_pop_min prio)) ∈ heapmap_rel → ⟨(Id×⇩rId)×⇩rheapmap_rel⟩nres_rel"
unfolding PR_CONST_def
apply (intro fun_relI nres_relI)
apply (frule heapmap_hmr_relI)
unfolding mop_pm_pop_min_alt
apply (intro ASSERT_refine_right)
apply (rule heapmap_nres_rel_prodI)
apply (rule hm_pop_min_op_refine[param_fo, THEN nres_relD]; assumption)
unfolding heapmap_rel_def hmr_rel_def in_br_conv
apply (refine_vcg; simp)
apply (refine_vcg hm_pop_min_α_correct[THEN leof_trans]; simp split: prod.splits)
done
text_raw ‹\paragraph{Second approach}›
definition "hm_kv_of_op hm i ≡ do {
ASSERT (hm_valid hm i ∧ hmr_invar hm);
k ← hm_key_of_op hm i;
v ← hm_the_lookup_op hm k;
RETURN (k, v)
}"
definition "kvi_rel hm i ≡ {((k,v),v) | k v. hm_key_of hm i = k}"
lemma hm_kv_op_refine[refine]:
assumes "(hm,h)∈hmr_rel"
shows "hm_kv_of_op hm i ≤ ⇓(kvi_rel hm i) (h.val_of_op h i)"
unfolding hm_kv_of_op_def h.val_of_op_def kvi_rel_def
hm_key_of_op_unfold hm_the_lookup_op_def
apply simp
apply refine_vcg
using assms
by (auto
simp: hm_valid_def hm_length_def hmr_rel_defs heapmap_α_def hm_key_of_def
split: prod.splits)
definition hm_pop_min_op' :: "('k,'v) ahm ⇒ (('k×'v) × ('k,'v) ahm) nres" where
"hm_pop_min_op' hm ≡ do {
ASSERT (heapmap_invar hm);
ASSERT (hm_valid hm 1);
kv ← hm_kv_of_op hm 1;
let l = hm_length hm;
hm ← hm_exch_op hm 1 l;
hm ← hm_butlast_op hm;
if (l≠1) then do {
hm ← hm_sink_op hm 1;
RETURN (kv,hm)
} else RETURN (kv,hm)
}"
lemma hm_pop_min_op_refine':
"⟦ (hm,h)∈hmr_rel ⟧ ⟹ hm_pop_min_op' hm ≤ ⇓(kvi_rel hm 1 ×⇩r hmr_rel) (h.pop_min_op h)"
unfolding hm_pop_min_op'_def h.pop_min_op_def
unfolding ignore_snd_refine_conv
apply refine_rcg
unfolding hmr_rel_def heapmap_rel_def
apply (unfold heapmap_invar_def; simp add: in_br_conv)
apply (simp_all add: in_br_conv)
done
lemma heapmap_nres_rel_prodI':
assumes "hmx ≤ ⇓(S ×⇩r hmr_rel) h'x"
assumes "h'x ≤ SPEC Φ"
assumes "⋀h' r. Φ (r,h') ⟹ h.heap_invar h'"
assumes "hmx ≤⇩n SPEC (λ(r,hm'). (∃r'. (r,r')∈S ∧ Φ (r',hmr_α hm')) ∧ hmr_invar hm' ⟶ RETURN (r,heapmap_α hm') ≤ ⇓(R×⇩rId) hx)"
shows "hmx ≤ ⇓(R×⇩rheapmap_rel) hx"
using assms
unfolding heapmap_rel_def hmr_rel_def heapmap_invar_def
apply (auto
simp: pw_le_iff pw_leof_iff refine_pw_simps in_br_conv
)
by meson
lemma ex_in_kvi_rel_conv:
"(∃r'. (r,r')∈kvi_rel hm i ∧ Φ r') ⟷ (fst r = hm_key_of hm i ∧ Φ (snd r))"
unfolding kvi_rel_def
apply (cases r)
apply auto
done
lemma hm_pop_min_aref': "(hm_pop_min_op', mop_pm_pop_min prio) ∈ heapmap_rel → ⟨(Id×⇩rId) ×⇩r heapmap_rel⟩nres_rel"
apply (intro fun_relI nres_relI)
apply (frule heapmap_hmr_relI)
unfolding mop_pm_pop_min_alt
apply (intro ASSERT_refine_right)
apply (rule heapmap_nres_rel_prodI')
apply (erule hm_pop_min_op_refine')
apply (unfold heapmap_rel_def hmr_rel_def in_br_conv) []
apply (rule h.pop_min_op_correct)
apply simp
apply simp
apply simp
apply (clarsimp simp: ex_in_kvi_rel_conv split: prod.splits)
unfolding hm_pop_min_op'_def hm_kv_of_op_def hm_key_of_op_unfold
hm_the_lookup_op_def
apply (refine_vcg
hm_exch_op_α_correct[THEN leof_trans]
hm_butlast_op_α_correct[THEN leof_trans]
hm_sink_op_α_correct[THEN leof_trans]
)
unfolding heapmap_rel_def hmr_rel_def in_br_conv
apply (auto intro: ranI)
done
subsubsection ‹Remove›
definition "hm_remove_op k hm ≡ do {
ASSERT (heapmap_invar hm);
ASSERT (k ∈ dom (heapmap_α hm));
i ← hm_index_op hm k;
let l = hm_length hm;
hm ← hm_exch_op hm i l;
hm ← hm_butlast_op hm;
if i ≠ l then
hm_repair_op hm i
else
RETURN hm
}"
definition (in heapstruct) "remove_op i h ≡ do {
ASSERT (heap_invar h);
ASSERT (valid h i);
let l = length h;
h ← exch_op h i l;
h ← butlast_op h;
if i ≠ l then
repair_op h i
else
RETURN h
}"
lemma (in -) swap_empty_iff[iff]: "swap l i j = [] ⟷ l=[]"
by (auto simp: swap_def)
lemma (in heapstruct)
butlast_exch_last: "butlast (exch h i (length h)) = update (butlast h) i (last h)"
unfolding exch_def update_def
apply (cases h rule: rev_cases)
apply (auto simp: swap_def butlast_list_update)
done
lemma (in heapstruct) remove_op_invar:
"⟦ heap_invar h; valid h i ⟧ ⟹ remove_op i h ≤ SPEC heap_invar"
unfolding remove_op_def
apply refine_vcg
apply (auto simp: valid_def) []
apply (auto simp: valid_def exch_def) []
apply (simp add: butlast_exch_last)
apply refine_vcg
apply auto []
apply auto []
apply (auto simp: valid_def) []
apply auto []
apply auto []
done
lemma hm_remove_op_refine[refine]:
"⟦ (hm,m)∈heapmap_rel; (hm,h)∈hmr_rel; heapmap_α hm k ≠ None⟧ ⟹
hm_remove_op k hm ≤ ⇓hmr_rel (h.remove_op (hm_index hm k) h)"
unfolding hm_remove_op_def h.remove_op_def heapmap_rel_def
apply (refine_rcg index_op_inline_refine)
unfolding hmr_rel_def
apply (auto simp: in_br_conv)
done
lemma hm_remove_op_α_correct:
"hm_remove_op k hm ≤⇩n SPEC (λhm'. heapmap_α hm' = (heapmap_α hm)(k:=None))"
unfolding hm_remove_op_def
apply (refine_vcg
hm_exch_op_α_correct[THEN leof_trans]
hm_butlast_op_α_correct[THEN leof_trans]
hm_repair_op_α_correct[THEN leof_trans]
hm_index_op_inline_leof
)
apply (auto; fail)
apply clarsimp
apply (rewrite at "hm_index _ k = hm_length _" in asm eq_commute)
apply (auto; fail)
done
lemma hm_remove_op_aref:
"(hm_remove_op,mop_map_delete_ex) ∈ Id → heapmap_rel → ⟨heapmap_rel⟩nres_rel"
apply (intro fun_relI nres_relI)
unfolding mop_map_delete_ex_alt
apply (rule ASSERT_refine_right)
apply (frule heapmap_hmr_relI)
apply (rule heapmap_nres_relI')
apply (rule hm_remove_op_refine; assumption?)
apply (unfold heapmap_rel_def in_br_conv; auto)
unfolding heapmap_rel_def hmr_rel_def in_br_conv
apply (refine_vcg h.remove_op_invar; clarsimp; fail)
apply (refine_vcg hm_remove_op_α_correct[THEN leof_trans]; simp; fail)
done
subsubsection ‹Peek-Min›
definition hm_peek_min_op :: "('k,'v) ahm ⇒ ('k×'v) nres" where
"hm_peek_min_op hm ≡ hm_kv_of_op hm 1"
lemma hm_peek_min_op_aref:
"(hm_peek_min_op, PR_CONST (mop_pm_peek_min prio)) ∈ heapmap_rel → ⟨Id×⇩rId⟩nres_rel"
unfolding PR_CONST_def
apply (intro fun_relI nres_relI)
proof -
fix hm and m :: "'k ⇀ 'v"
assume A: "(hm,m)∈heapmap_rel"
from A have [simp]: "h.heap_invar (hmr_α hm)" "hmr_invar hm" "m=heapmap_α hm"
unfolding heapmap_rel_def in_br_conv heapmap_invar_def
by simp_all
have "hm_peek_min_op hm ≤ ⇓ (kvi_rel hm 1) (h.peek_min_op (hmr_α hm))"
unfolding hm_peek_min_op_def h.peek_min_op_def
apply (refine_rcg hm_kv_op_refine)
using A
apply (simp add: heapmap_hmr_relI)
done
also have "⟦hmr_α hm ≠ []⟧ ⟹ (h.peek_min_op (hmr_α hm))
≤ SPEC (λv. v∈ran (heapmap_α hm) ∧ (∀v'∈ran (heapmap_α hm). prio v ≤ prio v'))"
apply refine_vcg
by simp_all
finally show "hm_peek_min_op hm ≤ ⇓ (Id ×⇩r Id) (mop_pm_peek_min prio m)"
unfolding mop_pm_peek_min_alt
apply (simp add: pw_le_iff refine_pw_simps hm_peek_min_op_def hm_kv_of_op_def
hm_key_of_op_unfold hm_the_lookup_op_def)
apply (fastforce simp: kvi_rel_def ran_def)
done
qed
end
end
Theory IICF_Array
section ‹Plain Arrays Implementing List Interface›
theory IICF_Array
imports "../Intf/IICF_List"
begin
text ‹Lists of fixed length are directly implemented with arrays. ›
definition "is_array l p ≡ p↦⇩al"
lemma is_array_precise[safe_constraint_rules]: "precise is_array"
apply rule
unfolding is_array_def
apply prec_extract_eqs
by simp
definition array_assn where "array_assn A ≡ hr_comp is_array (⟨the_pure A⟩list_rel)"
lemmas [safe_constraint_rules] = CN_FALSEI[of is_pure "array_assn A" for A]
definition [simp,code_unfold]: "heap_array_empty ≡ Array.of_list []"
definition [simp,code_unfold]: "heap_array_set p i v ≡ Array.upd i v p"
context
notes [fcomp_norm_unfold] = array_assn_def[symmetric]
notes [intro!] = hfrefI hn_refineI[THEN hn_refine_preI]
notes [simp] = pure_def hn_ctxt_def is_array_def invalid_assn_def
begin
lemma array_empty_hnr_aux: "(uncurry0 heap_array_empty,uncurry0 (RETURN op_list_empty)) ∈ unit_assn⇧k →⇩a is_array"
by sep_auto
sepref_decl_impl (no_register) array_empty: array_empty_hnr_aux .
lemma array_replicate_hnr_aux:
"(uncurry Array.new, uncurry (RETURN oo op_list_replicate))
∈ nat_assn⇧k *⇩a id_assn⇧k →⇩a is_array"
by (sep_auto)
sepref_decl_impl (no_register) array_replicate: array_replicate_hnr_aux .
definition [simp]: "op_array_replicate ≡ op_list_replicate"
sepref_register op_array_replicate
lemma array_fold_custom_replicate:
"replicate = op_array_replicate"
"op_list_replicate = op_array_replicate"
"mop_list_replicate = RETURN oo op_array_replicate"
by (auto simp: op_array_replicate_def intro!: ext)
lemmas array_replicate_custom_hnr[sepref_fr_rules] = array_replicate_hnr[unfolded array_fold_custom_replicate]
lemma array_of_list_hnr_aux: "(Array.of_list,RETURN o op_list_copy) ∈ (list_assn id_assn)⇧k →⇩a is_array"
unfolding list_assn_pure_conv
by (sep_auto)
sepref_decl_impl (no_register) array_of_list: array_of_list_hnr_aux .
definition [simp]: "op_array_of_list ≡ op_list_copy"
sepref_register op_array_of_list
lemma array_fold_custom_of_list:
"l = op_array_of_list l"
"op_list_copy = op_array_of_list"
"mop_list_copy = RETURN o op_array_of_list"
by (auto intro!: ext)
lemmas array_of_list_custom_hnr[sepref_fr_rules] = array_of_list_hnr[folded op_array_of_list_def]
lemma array_copy_hnr_aux: "(array_copy, RETURN o op_list_copy) ∈ is_array⇧k →⇩a is_array"
by sep_auto
sepref_decl_impl array_copy: array_copy_hnr_aux .
lemma array_get_hnr_aux: "(uncurry Array.nth,uncurry (RETURN oo op_list_get)) ∈ [λ(l,i). i<length l]⇩a is_array⇧k *⇩a nat_assn⇧k → id_assn"
by sep_auto
sepref_decl_impl array_get: array_get_hnr_aux .
lemma array_set_hnr_aux: "(uncurry2 heap_array_set,uncurry2 (RETURN ooo op_list_set)) ∈ [λ((l,i),_). i<length l]⇩a is_array⇧d *⇩a nat_assn⇧k *⇩a id_assn⇧k → is_array"
by sep_auto
sepref_decl_impl array_set: array_set_hnr_aux .
lemma array_length_hnr_aux: "(Array.len,RETURN o op_list_length) ∈ is_array⇧k →⇩a nat_assn"
by sep_auto
sepref_decl_impl array_length: array_length_hnr_aux .
end
definition [simp]: "op_array_empty ≡ op_list_empty"
interpretation array: list_custom_empty "array_assn A" heap_array_empty op_array_empty
apply unfold_locales
apply (rule array_empty_hnr[simplified pre_list_empty_def])
by (auto)
end
Theory IICF_MS_Array_List
theory IICF_MS_Array_List
imports
"../Intf/IICF_List"
Separation_Logic_Imperative_HOL.Array_Blit
Separation_Logic_Imperative_HOL.Default_Insts
begin
type_synonym 'a ms_array_list = "'a Heap.array × nat"
definition "is_ms_array_list ms l ≡ λ(a,n). ∃⇩Al'. a ↦⇩a l' * ↑(n ≤ length l' ∧ l = take n l' ∧ ms=length l')"
lemma is_ms_array_list_prec[safe_constraint_rules]: "precise (is_ms_array_list ms)"
unfolding is_ms_array_list_def[abs_def]
apply(rule preciseI)
apply(simp split: prod.splits)
using preciseD snga_prec by fastforce
definition "marl_empty_sz maxsize ≡ do {
a ← Array.new maxsize default;
return (a,0)
}"
definition "marl_append ≡ λ(a,n) x. do {
a ← Array.upd n x a;
return (a,n+1)
}"
definition marl_length :: "'a::heap ms_array_list ⇒ nat Heap" where
"marl_length ≡ λ(a,n). return (n)"
definition marl_is_empty :: "'a::heap ms_array_list ⇒ bool Heap" where
"marl_is_empty ≡ λ(a,n). return (n=0)"
definition marl_last :: "'a::heap ms_array_list ⇒ 'a Heap" where
"marl_last ≡ λ(a,n). do {
Array.nth a (n - 1)
}"
definition marl_butlast :: "'a::heap ms_array_list ⇒ 'a ms_array_list Heap" where
"marl_butlast ≡ λ(a,n). do {
return (a,n - 1)
}"
definition marl_get :: "'a::heap ms_array_list ⇒ nat ⇒ 'a Heap" where
"marl_get ≡ λ(a,n) i. Array.nth a i"
definition marl_set :: "'a::heap ms_array_list ⇒ nat ⇒ 'a ⇒ 'a ms_array_list Heap" where
"marl_set ≡ λ(a,n) i x. do { a ← Array.upd i x a; return (a,n)}"
lemma marl_empty_sz_rule[sep_heap_rules]: "< emp > marl_empty_sz N <is_ms_array_list N []>"
by (sep_auto simp: marl_empty_sz_def is_ms_array_list_def)
lemma marl_append_rule[sep_heap_rules]: "length l < N ⟹
< is_ms_array_list N l a >
marl_append a x
<λa. is_ms_array_list N (l@[x]) a >⇩t"
by (sep_auto
simp: marl_append_def is_ms_array_list_def take_update_last
split: prod.splits)
lemma marl_length_rule[sep_heap_rules]: "
<is_ms_array_list N l a>
marl_length a
<λr. is_ms_array_list N l a * ↑(r=length l)>"
by (sep_auto simp: marl_length_def is_ms_array_list_def)
lemma marl_is_empty_rule[sep_heap_rules]: "
<is_ms_array_list N l a>
marl_is_empty a
<λr. is_ms_array_list N l a * ↑(r⟷(l=[]))>"
by (sep_auto simp: marl_is_empty_def is_ms_array_list_def)
lemma marl_last_rule[sep_heap_rules]: "
l≠[] ⟹
<is_ms_array_list N l a>
marl_last a
<λr. is_ms_array_list N l a * ↑(r=last l)>"
by (sep_auto simp: marl_last_def is_ms_array_list_def last_take_nth_conv)
lemma marl_butlast_rule[sep_heap_rules]: "
l≠[] ⟹
<is_ms_array_list N l a>
marl_butlast a
<is_ms_array_list N (butlast l)>⇩t"
by (sep_auto
split: prod.splits
simp: marl_butlast_def is_ms_array_list_def butlast_take)
lemma marl_get_rule[sep_heap_rules]: "
i<length l ⟹
<is_ms_array_list N l a>
marl_get a i
<λr. is_ms_array_list N l a * ↑(r=l!i)>"
by (sep_auto simp: marl_get_def is_ms_array_list_def split: prod.split)
lemma marl_set_rule[sep_heap_rules]: "
i<length l ⟹
<is_ms_array_list N l a>
marl_set a i x
<is_ms_array_list N (l[i:=x])>"
by (sep_auto simp: marl_set_def is_ms_array_list_def split: prod.split)
definition "marl_assn N A ≡ hr_comp (is_ms_array_list N) (⟨the_pure A⟩list_rel)"
lemmas [safe_constraint_rules] = CN_FALSEI[of is_pure "marl_assn N A" for N A]
context
notes [fcomp_norm_unfold] = marl_assn_def[symmetric]
notes [intro!] = hfrefI hn_refineI[THEN hn_refine_preI]
notes [simp] = pure_def hn_ctxt_def invalid_assn_def
begin
definition [simp]: "op_marl_empty_sz (N::nat) ≡ op_list_empty"
context fixes N :: nat begin
sepref_register "PR_CONST (op_marl_empty_sz N)"
end
lemma [def_pat_rules]: "op_marl_empty_sz$N ≡ UNPROTECT (op_marl_empty_sz N)" by simp
lemma marl_fold_custom_empty_sz:
"op_list_empty = op_marl_empty_sz N"
"mop_list_empty = RETURN (op_marl_empty_sz N)"
"[] = op_marl_empty_sz N"
by auto
lemma marl_empty_hnr_aux: "(uncurry0 (marl_empty_sz N), uncurry0 (RETURN op_list_empty)) ∈ unit_assn⇧k →⇩a is_ms_array_list N"
by sep_auto
lemmas marl_empty_hnr = marl_empty_hnr_aux[FCOMP op_list_empty.fref[of "the_pure A" for A]]
lemmas marl_empty_hnr_mop = marl_empty_hnr[FCOMP mk_mop_rl0_np[OF mop_list_empty_alt]]
lemma marl_empty_sz_hnr[sepref_fr_rules]:
"(uncurry0 (marl_empty_sz N), uncurry0 (RETURN (PR_CONST (op_marl_empty_sz N)))) ∈ unit_assn⇧k →⇩a marl_assn N A"
using marl_empty_hnr
by simp
lemma marl_append_hnr_aux: "(uncurry marl_append,uncurry (RETURN oo op_list_append)) ∈ [λ(l,_). length l<N]⇩a ((is_ms_array_list N)⇧d *⇩a id_assn⇧k) → is_ms_array_list N"
by sep_auto
lemmas marl_append_hnr[sepref_fr_rules] = marl_append_hnr_aux[FCOMP op_list_append.fref]
lemmas marl_append_hnr_mop[sepref_fr_rules] = marl_append_hnr[FCOMP mk_mop_rl2_np[OF mop_list_append_alt]]
lemma marl_length_hnr_aux: "(marl_length,RETURN o op_list_length) ∈ (is_ms_array_list N)⇧k →⇩a nat_assn"
by sep_auto
lemmas marl_length_hnr[sepref_fr_rules] = marl_length_hnr_aux[FCOMP op_list_length.fref[of "the_pure A" for A]]
lemmas marl_length_hnr_mop[sepref_fr_rules] = marl_length_hnr[FCOMP mk_mop_rl1_np[OF mop_list_length_alt]]
lemma marl_is_empty_hnr_aux: "(marl_is_empty,RETURN o op_list_is_empty) ∈ (is_ms_array_list N)⇧k →⇩a bool_assn"
by sep_auto
lemmas marl_is_empty_hnr[sepref_fr_rules] = marl_is_empty_hnr_aux[FCOMP op_list_is_empty.fref[of "the_pure A" for A]]
lemmas marl_is_empty_hnr_mop[sepref_fr_rules] = marl_is_empty_hnr[FCOMP mk_mop_rl1_np[OF mop_list_is_empty_alt]]
lemma marl_last_hnr_aux: "(marl_last,RETURN o op_list_last) ∈ [λx. x≠[]]⇩a (is_ms_array_list N)⇧k → id_assn"
by sep_auto
lemmas marl_last_hnr[sepref_fr_rules] = marl_last_hnr_aux[FCOMP op_list_last.fref]
lemmas marl_last_hnr_mop[sepref_fr_rules] = marl_last_hnr[FCOMP mk_mop_rl1[OF mop_list_last_alt]]
lemma marl_butlast_hnr_aux: "(marl_butlast,RETURN o op_list_butlast) ∈ [λx. x≠[]]⇩a (is_ms_array_list N)⇧d → (is_ms_array_list N)"
by sep_auto
lemmas marl_butlast_hnr[sepref_fr_rules] = marl_butlast_hnr_aux[FCOMP op_list_butlast.fref[of "the_pure A" for A]]
lemmas marl_butlast_hnr_mop[sepref_fr_rules] = marl_butlast_hnr[FCOMP mk_mop_rl1[OF mop_list_butlast_alt]]
lemma marl_get_hnr_aux: "(uncurry marl_get,uncurry (RETURN oo op_list_get)) ∈ [λ(l,i). i<length l]⇩a ((is_ms_array_list N)⇧k *⇩a nat_assn⇧k) → id_assn"
by sep_auto
lemmas marl_get_hnr[sepref_fr_rules] = marl_get_hnr_aux[FCOMP op_list_get.fref]
lemmas marl_get_hnr_mop[sepref_fr_rules] = marl_get_hnr[FCOMP mk_mop_rl2[OF mop_list_get_alt]]
lemma marl_set_hnr_aux: "(uncurry2 marl_set,uncurry2 (RETURN ooo op_list_set)) ∈ [λ((l,i),_). i<length l]⇩a ((is_ms_array_list N)⇧d *⇩a nat_assn⇧k *⇩a id_assn⇧k) → (is_ms_array_list N)"
by sep_auto
lemmas marl_set_hnr[sepref_fr_rules] = marl_set_hnr_aux[FCOMP op_list_set.fref]
lemmas marl_set_hnr_mop[sepref_fr_rules] = marl_set_hnr[FCOMP mk_mop_rl3[OF mop_list_set_alt]]
end
context
fixes N :: nat
assumes N_sz: "N>10"
begin
schematic_goal "hn_refine (emp) (?c::?'c Heap) ?Γ' ?R (do {
let x = op_marl_empty_sz N;
RETURN (x@[1::nat])
})"
using N_sz
by sepref
end
schematic_goal "hn_refine (emp) (?c::?'c Heap) ?Γ' ?R (do {
let x = op_list_empty;
RETURN (x@[1::nat])
})"
apply (subst marl_fold_custom_empty_sz[where N=10])
apply sepref
done
end
Theory IICF_Indexed_Array_List
theory IICF_Indexed_Array_List
imports
"HOL-Library.Rewrite"
"../Intf/IICF_List"
"List-Index.List_Index"
IICF_Array
IICF_MS_Array_List
begin
text ‹We implement distinct lists of natural numbers in the range ‹{0..<N}›
by a length counter and two arrays of size ‹N›.
The first array stores the list, and the second array stores the positions of
the elements in the list, or ‹N› if the element is not in the list.
This allows for an efficient index query.
The implementation is done in two steps:
First, we use a list and a fixed size list for the index mapping.
Second, we refine the lists to arrays.
›
type_synonym aial = "nat list × nat list"
locale ial_invar = fixes
maxsize :: nat
and l :: "nat list"
and qp :: "nat list"
assumes maxsize_eq[simp]: "maxsize = length qp"
assumes l_distinct[simp]: "distinct l"
assumes l_set: "set l ⊆ {0..<length qp}"
assumes qp_def: "∀k<length qp. qp!k = (if k∈set l then List_Index.index l k else length qp)"
begin
lemma l_len: "length l ≤ length qp"
proof -
from card_mono[OF _ l_set] have "card (set l) ≤ length qp" by auto
with distinct_card[OF l_distinct] show ?thesis by simp
qed
lemma idx_len[simp]: "i<length l ⟹ l!i < length qp"
using l_set
by (metis atLeastLessThan_iff nth_mem psubsetD psubsetI)
lemma l_set_simp[simp]: "k∈set l ⟹ k < length qp"
by (auto dest: subsetD[OF l_set])
lemma qpk_idx: "k<length qp ⟹ qp ! k < length l ⟷ k ∈ set l"
proof (rule iffI)
assume A: "k<length qp"
{
assume "qp!k < length l"
hence "qp!k < length qp" using l_len by simp
with spec[OF qp_def, of k] A show "k∈set l"
by (auto split: if_split_asm)
}
{
assume "k∈set l"
thus "qp!k<length l"
using qp_def by (auto split: if_split_asm) []
}
qed
lemma lqpk[simp]: "k ∈ set l ⟹ l ! (qp ! k) = k"
using spec[OF qp_def, of k] by auto
lemma "⟦i<length l; j<length l; l!i=l!j⟧ ⟹ i=j"
by (simp add: nth_eq_iff_index_eq)
lemmas index_swap[simp] = index_swap_if_distinct[folded swap_def, OF l_distinct]
lemma swap_invar:
assumes "i<length l" "j<length l"
shows "ial_invar (length qp) (swap l i j) (qp[l ! j := i, l ! i := j])"
using assms
apply unfold_locales
apply auto []
apply auto []
apply auto []
apply (auto simp: simp: nth_list_update nth_eq_iff_index_eq index_nth_id) []
using qp_def apply auto [2]
done
end
definition "ial_rel1 maxsize ≡ br fst (uncurry (ial_invar maxsize))"
definition ial_assn2 :: "nat ⇒ nat list * nat list ⇒ _" where
"ial_assn2 maxsize ≡ prod_assn (marl_assn maxsize nat_assn) (array_assn nat_assn)"
definition "ial_assn maxsize A ≡ hr_comp (hr_comp (ial_assn2 maxsize) (ial_rel1 maxsize)) (⟨the_pure A⟩list_rel)"
lemmas [safe_constraint_rules] = CN_FALSEI[of is_pure "ial_assn maxsize A" for maxsize A]
subsection ‹Empty›
definition op_ial_empty_sz :: "nat ⇒ 'a list"
where [simp]: "op_ial_empty_sz ms ≡ op_list_empty"
lemma [def_pat_rules]: "op_ial_empty_sz$maxsize ≡ UNPROTECT (op_ial_empty_sz maxsize)"
by simp
context fixes maxsize :: nat begin
sepref_register "PR_CONST (op_ial_empty_sz maxsize)"
end
context
fixes maxsize :: nat
notes [fcomp_norm_unfold] = ial_assn_def[symmetric]
notes [simp] = hn_ctxt_def pure_def
begin
definition "aial_empty ≡ do {
let l = op_marl_empty_sz maxsize;
let qp = op_array_replicate maxsize maxsize;
RETURN (l,qp)
}"
lemma aial_empty_impl: "(aial_empty,RETURN op_list_empty) ∈ ⟨ial_rel1 maxsize⟩nres_rel"
unfolding aial_empty_def
apply (refine_vcg nres_relI)
apply (clarsimp simp: ial_rel1_def br_def)
apply unfold_locales
apply auto
done
context
notes [id_rules] = itypeI[Pure.of maxsize "TYPE(nat)"]
notes [sepref_import_param] = IdI[of maxsize]
begin
sepref_definition ial_empty is "uncurry0 aial_empty" :: "unit_assn⇧k →⇩a ial_assn2 maxsize"
unfolding aial_empty_def ial_assn2_def
using [[id_debug]]
by sepref
end
sepref_decl_impl (no_register) ial_empty: ial_empty.refine[FCOMP aial_empty_impl] .
lemma ial_empty_sz_hnr[sepref_fr_rules]:
"(uncurry0 local.ial_empty, uncurry0 (RETURN (PR_CONST (op_ial_empty_sz maxsize)))) ∈ unit_assn⇧k →⇩a ial_assn maxsize A"
using ial_empty_hnr[of A] by simp
subsection ‹Swap›
definition "aial_swap ≡ λ(l,qp) i j. do {
vi ← mop_list_get l i;
vj ← mop_list_get l j;
l ← mop_list_set l i vj;
l ← mop_list_set l j vi;
qp ← mop_list_set qp vj i;
qp ← mop_list_set qp vi j;
RETURN (l,qp)
}"
lemma in_ial_rel1_conv:
"((pq, qp), l) ∈ ial_rel1 ms ⟷ pq=l ∧ ial_invar ms l qp"
by (auto simp: ial_rel1_def in_br_conv)
lemma aial_swap_impl:
"(aial_swap,mop_list_swap) ∈ ial_rel1 maxsize → nat_rel → nat_rel → ⟨ial_rel1 maxsize⟩nres_rel"
proof (intro fun_relI nres_relI; clarsimp simp: in_ial_rel1_conv; refine_vcg; clarsimp)
fix l qp i j
assume [simp]: "i<length l" "j<length l" and "ial_invar maxsize l qp"
then interpret ial_invar maxsize l qp by simp
show "aial_swap (l, qp) i j ≤ SPEC (λc. (c, swap l i j) ∈ ial_rel1 maxsize)"
unfolding aial_swap_def
apply refine_vcg
apply (vc_solve simp add: in_ial_rel1_conv swap_def[symmetric] swap_invar)
done
qed
sepref_definition ial_swap is
"uncurry2 aial_swap" :: "(ial_assn2 maxsize)⇧d *⇩a nat_assn⇧k *⇩a nat_assn⇧k →⇩a ial_assn2 maxsize"
unfolding aial_swap_def ial_assn2_def
by sepref
sepref_decl_impl (ismop) test: ial_swap.refine[FCOMP aial_swap_impl]
uses mop_list_swap.fref .
subsection ‹Length›
definition aial_length :: "aial ⇒ nat nres"
where "aial_length ≡ λ(l,_). RETURN (op_list_length l)"
lemma aial_length_impl: "(aial_length, mop_list_length) ∈ ial_rel1 maxsize → ⟨nat_rel⟩nres_rel"
apply (intro fun_relI nres_relI)
unfolding ial_rel1_def in_br_conv aial_length_def
by auto
sepref_definition ial_length is "aial_length" :: "(ial_assn2 maxsize)⇧k →⇩a nat_assn"
unfolding aial_length_def ial_assn2_def
by sepref
sepref_decl_impl (ismop) ial_length: ial_length.refine[FCOMP aial_length_impl] .
subsection ‹Index›
definition aial_index :: "aial ⇒ nat ⇒ nat nres" where
"aial_index ≡ λ(l,qp) k. do {
ASSERT (k∈set l);
i ← mop_list_get qp k;
RETURN i
}"
lemma aial_index_impl:
"(uncurry aial_index, uncurry mop_list_index) ∈
[λ(l,k). k∈set l]⇩f ial_rel1 maxsize ×⇩r nat_rel → ⟨nat_rel⟩nres_rel"
apply (intro fun_relI nres_relI frefI)
unfolding ial_rel1_def
proof (clarsimp simp: in_br_conv)
fix l qp k
assume "ial_invar maxsize l qp"
then interpret ial_invar maxsize l qp .
assume "k∈set l"
then show "aial_index (l,qp) k ≤ RETURN (index l k)"
unfolding aial_index_def
apply (refine_vcg)
by (auto simp: qp_def)
qed
sepref_definition ial_index is "uncurry aial_index" :: "(ial_assn2 maxsize)⇧k *⇩a nat_assn⇧k →⇩a nat_assn"
unfolding aial_index_def ial_assn2_def
by sepref
sepref_decl_impl (ismop) ial_index: ial_index.refine[FCOMP aial_index_impl] .
subsection ‹Butlast›
definition aial_butlast :: "aial ⇒ aial nres" where
"aial_butlast ≡ λ(l,qp). do {
ASSERT (l≠[]);
len ← mop_list_length l;
k ← mop_list_get l (len - 1);
l ← mop_list_butlast l;
qp ← mop_list_set qp k (length qp);
RETURN (l,qp)
}"
lemma aial_butlast_refine: "(aial_butlast, mop_list_butlast) ∈ ial_rel1 maxsize → ⟨ial_rel1 maxsize⟩nres_rel"
apply (intro fun_relI nres_relI)
unfolding ial_rel1_def
proof (clarsimp simp: in_br_conv simp del: mop_list_butlast_alt)
fix l qp
assume "ial_invar maxsize l qp"
then interpret ial_invar maxsize l qp .
{
assume A: "l≠[]"
have "ial_invar (length qp) (butlast l) (qp[l ! (length l - Suc 0) := length qp])"
apply standard
apply clarsimp_all
apply (auto simp: distinct_butlast) []
using l_set apply (auto dest: in_set_butlastD) []
using qp_def A l_distinct
apply (auto simp: nth_list_update neq_Nil_rev_conv index_append simp del: l_distinct)
done
} note aux1=this
show "aial_butlast (l, qp) ≤ ⇓ (br fst (uncurry (ial_invar maxsize))) (mop_list_butlast l)"
unfolding aial_butlast_def mop_list_butlast_alt
apply refine_vcg
apply (clarsimp_all simp: in_br_conv aux1)
done
qed
sepref_definition ial_butlast is aial_butlast :: "(ial_assn2 maxsize)⇧d →⇩a ial_assn2 maxsize"
unfolding aial_butlast_def ial_assn2_def by sepref
sepref_decl_impl (ismop) ial_butlast: ial_butlast.refine[FCOMP aial_butlast_refine] .
subsection ‹Append›
definition aial_append :: "aial ⇒ nat ⇒ aial nres" where
"aial_append ≡ λ(l,qp) k. do {
ASSERT (k<length qp ∧ k∉set l ∧ length l < length qp);
len ← mop_list_length l;
l ← mop_list_append l k;
qp ← mop_list_set qp k len;
RETURN (l,qp)
}"
lemma aial_append_refine:
"(uncurry aial_append,uncurry mop_list_append) ∈
[λ(l,k). k<maxsize ∧ k∉set l]⇩f ial_rel1 maxsize ×⇩r nat_rel → ⟨ial_rel1 maxsize⟩nres_rel"
apply (intro frefI nres_relI)
unfolding ial_rel1_def
proof (clarsimp simp: in_br_conv)
fix l qp k
assume KLM: "k<maxsize" and KNL: "k∉set l"
assume "ial_invar maxsize l qp"
then interpret ial_invar maxsize l qp .
from KLM have KLL: "k<length qp" by simp
note distinct_card[OF l_distinct, symmetric]
also from KNL l_set have "set l ⊆ {0..<k} ∪ {Suc k..<length qp}"
by (auto simp: nat_less_le)
from card_mono[OF _ this] have "card (set l) ≤ card …"
by simp
also note card_Un_le
also have "card {0..<k} + card {Suc k..<length qp} = k + (length qp - Suc k)"
by simp
also have "… < length qp" using KLL by simp
finally have LLEN: "length l < length qp" .
have aux1[simp]: "ial_invar (length qp) (l @ [k]) (qp[k := length l])"
apply standard
apply (clarsimp_all simp: KNL KLL)
using KLL apply (auto simp: Suc_le_eq LLEN) []
apply (auto simp: index_append KNL nth_list_update')
apply (simp add: qp_def)
apply (simp add: qp_def)
done
show "aial_append (l, qp) k ≤ ⇓ (br fst (uncurry (ial_invar maxsize))) (RETURN (l@[k]))"
unfolding aial_append_def mop_list_append_def
apply refine_vcg
apply (clarsimp_all simp: in_br_conv KLL KNL LLEN)
done
qed
private lemma aial_append_impl_aux: "((l, qp), l') ∈ ial_rel1 maxsize ⟹ l'=l ∧ maxsize = length qp"
unfolding ial_rel1_def
by (clarsimp simp: in_br_conv ial_invar.maxsize_eq[symmetric])
context
notes [dest!] = aial_append_impl_aux
begin
sepref_definition ial_append is
"uncurry aial_append" :: "[λ(lqp,_). lqp∈Domain (ial_rel1 maxsize)]⇩a (ial_assn2 maxsize)⇧d *⇩a nat_assn⇧k → ial_assn2 maxsize"
unfolding aial_append_def ial_assn2_def
by sepref
end
lemma "(λb. b<maxsize, X) ∈ A → bool_rel"
apply auto
oops
context begin
private lemma append_fref': "⟦IS_BELOW_ID R⟧
⟹ (uncurry mop_list_append, uncurry mop_list_append) ∈ ⟨R⟩list_rel ×⇩r R →⇩f ⟨⟨R⟩list_rel⟩nres_rel"
by (rule mop_list_append.fref)
sepref_decl_impl (ismop) ial_append: ial_append.refine[FCOMP aial_append_refine] uses append_fref'
unfolding IS_BELOW_ID_def
apply (parametricity; auto simp: single_valued_below_Id)
done
end
subsection ‹Get›
definition aial_get :: "aial ⇒ nat ⇒ nat nres" where
"aial_get ≡ λ(l,qp) i. mop_list_get l i"
lemma aial_get_refine: "(aial_get,mop_list_get) ∈ ial_rel1 maxsize → nat_rel → ⟨nat_rel⟩nres_rel"
apply (intro fun_relI nres_relI)
unfolding aial_get_def ial_rel1_def mop_list_get_def in_br_conv
apply refine_vcg
apply clarsimp_all
done
sepref_definition ial_get is "uncurry aial_get" :: "(ial_assn2 maxsize)⇧k *⇩a nat_assn⇧k →⇩a nat_assn"
unfolding aial_get_def ial_assn2_def by sepref
sepref_decl_impl (ismop) ial_get: ial_get.refine[FCOMP aial_get_refine] .
subsection ‹Contains›
definition aial_contains :: "nat ⇒ aial ⇒ bool nres" where
"aial_contains ≡ λk (l,qp). do {
if k<maxsize then do {
i ← mop_list_get qp k;
RETURN (i<maxsize)
} else RETURN False
}"
lemma aial_contains_refine: "(uncurry aial_contains,uncurry mop_list_contains)
∈ (nat_rel ×⇩r ial_rel1 maxsize) →⇩f ⟨bool_rel⟩nres_rel"
apply (intro frefI nres_relI)
unfolding ial_rel1_def
proof (clarsimp simp: in_br_conv)
fix l qp k
assume "ial_invar maxsize l qp"
then interpret ial_invar maxsize l qp .
show "aial_contains k (l, qp) ≤ RETURN (k∈set l)"
unfolding aial_contains_def
apply refine_vcg
by (auto simp: l_len qp_def split: if_split_asm)
qed
context
notes [id_rules] = itypeI[Pure.of maxsize "TYPE(nat)"]
notes [sepref_import_param] = IdI[of maxsize]
begin
sepref_definition ial_contains is "uncurry aial_contains" :: "nat_assn⇧k *⇩a (ial_assn2 maxsize)⇧k →⇩a bool_assn"
unfolding aial_contains_def ial_assn2_def by sepref
end
sepref_decl_impl (ismop) ial_contains: ial_contains.refine[FCOMP aial_contains_refine] .
end
end
Theory IICF_Impl_Heapmap
section ‹Implementation of Heaps by Arrays›
theory IICF_Impl_Heapmap
imports IICF_Abs_Heapmap "../IICF_Indexed_Array_List"
begin
text ‹Some setup to circumvent the really inefficient implementation
of division in the code generator, which has to consider several
cases for negative divisors and dividends. ›
definition [code_unfold]:
"efficient_nat_div2 n
≡ nat_of_integer (fst (Code_Numeral.divmod_abs (integer_of_nat n) 2))"
lemma efficient_nat_div2[simp]: "efficient_nat_div2 n = n div 2"
by (simp add: efficient_nat_div2_def nat_of_integer.rep_eq)
type_synonym 'v hma = "nat list × ('v list)"
sepref_decl_intf 'v i_hma is "nat list × (nat ⇀ 'v)"
locale hmstruct_impl = hmstruct prio for prio :: "'v::heap ⇒ 'p::linorder"
begin
lemma param_prio: "(prio,prio) ∈ Id → Id" by simp
lemmas [sepref_import_param] = param_prio
sepref_register prio
end
context
fixes maxsize :: nat
fixes prio :: "'v::heap ⇒ 'p::linorder"
notes [map_type_eqs] = map_type_eqI[Pure.of "TYPE((nat,'v) ahm)" "TYPE('v i_hma)"]
begin
interpretation hmstruct .
interpretation hmstruct_impl .
definition "hm_impl1_α ≡ λ(pq,ml).
(pq,λk. if k∈set pq then Some (ml!k) else None)"
definition "hm_impl1_invar ≡ λ(pq,ml).
hmr_invar (hm_impl1_α (pq,ml))
∧ set pq ⊆ {0..<maxsize}
∧ ((pq=[] ∧ ml=[]) ∨ (length ml = maxsize))"
definition "hm_impl1_weak_invar ≡ λ(pq,ml).
set pq ⊆ {0..<maxsize}
∧ ((pq=[] ∧ ml=[]) ∨ (length ml = maxsize))"
definition "hm_impl1_rel ≡ br hm_impl1_α hm_impl1_invar"
definition "hm_weak_impl'_rel ≡ br hm_impl1_α hm_impl1_weak_invar"
lemmas hm_impl1_rel_defs =
hm_impl1_rel_def hm_weak_impl'_rel_def hm_impl1_weak_invar_def hm_impl1_invar_def hm_impl1_α_def in_br_conv
lemma hm_impl_α_fst_eq:
"(x1, x2) = hm_impl1_α (x1a, x2a) ⟹ x1 = x1a"
unfolding hm_impl1_α_def by (auto split: if_split_asm)
term hm_empty_op
definition hm_empty_op' :: "'v hma nres"
where "hm_empty_op' ≡ do {
let pq = op_ial_empty_sz maxsize;
let ml = op_list_empty;
RETURN (pq,ml)
}"
lemma hm_empty_op'_refine: "(hm_empty_op', hm_empty_op) ∈ ⟨hm_impl1_rel⟩nres_rel"
apply (intro fun_relI nres_relI)
unfolding hm_empty_op'_def hm_empty_op_def hm_impl1_rel_defs
by (auto simp: in_br_conv)
definition hm_length' :: "'v hma ⇒ nat" where "hm_length' ≡ λ(pq,ml). length pq"
lemma hm_length'_refine: "(RETURN o hm_length',RETURN o hm_length) ∈ hm_impl1_rel → ⟨nat_rel⟩nres_rel"
apply (intro fun_relI nres_relI)
unfolding hm_length'_def hm_length_def hm_impl1_rel_defs
by (auto)
term hm_key_of_op
definition "hm_key_of_op' ≡ λ(pq,ml) i. ASSERT (i>0) ⪢ mop_list_get pq (i - 1)"
lemma hm_key_of_op'_refine: "(hm_key_of_op', hm_key_of_op) ∈ hm_impl1_rel → nat_rel → ⟨nat_rel⟩nres_rel"
apply (intro fun_relI nres_relI)
unfolding hm_key_of_op'_def hm_key_of_op_def hm_impl1_rel_defs
by (auto)
term hm_lookup
definition "hm_lookup_op' ≡ λ(pq,ml) k. do {
if (k<maxsize) then do {
let c = op_list_contains k pq;
if c then do {
v ← mop_list_get ml k;
RETURN (Some v)
} else RETURN None
} else RETURN None
}"
lemma hm_lookup_op'_refine: "(uncurry hm_lookup_op', uncurry (RETURN oo hm_lookup))
∈ (hm_impl1_rel ×⇩r nat_rel) →⇩f ⟨⟨Id⟩option_rel⟩nres_rel"
apply (intro frefI nres_relI)
unfolding hm_lookup_op_def hm_lookup_op'_def o_def uncurry_def
apply refine_vcg
apply (auto simp: hm_impl1_rel_defs heapmap_α_def hmr_invar_def)
done
term hm_contains_key_op
definition "hm_contains_key_op' ≡ λk (pq,ml). do {
if (k<maxsize) then do {
RETURN (op_list_contains k pq)
} else RETURN False
}"
lemma hm_contains_key_op'_refine: "(uncurry hm_contains_key_op', uncurry hm_contains_key_op)
∈ (nat_rel ×⇩r hm_impl1_rel) →⇩f ⟨bool_rel⟩nres_rel"
apply (intro frefI nres_relI)
unfolding hm_contains_key_op_def hm_contains_key_op'_def o_def uncurry_def PR_CONST_def
apply refine_vcg
apply (auto simp: hm_impl1_rel_defs heapmap_α_def hmr_invar_def)
done
term hm_valid
definition "hm_exch_op' ≡ λ(pq,ml) i j. do {
ASSERT (hm_valid (hm_impl1_α (pq,ml)) i);
ASSERT (hm_valid (hm_impl1_α (pq,ml)) j);
pq ← mop_list_swap pq (i - 1) (j - 1);
RETURN (pq,ml)
}"
lemma hm_impl1_relI:
assumes "hmr_invar b"
assumes "(a,b)∈hm_weak_impl'_rel"
shows "(a,b)∈hm_impl1_rel"
using assms
unfolding hmr_rel_def hm_impl1_rel_def hm_weak_impl'_rel_def in_br_conv
hm_impl1_weak_invar_def hm_impl1_invar_def
by auto
lemma hm_impl1_nres_relI:
assumes "b ≤⇩n SPEC hmr_invar"
assumes "(a,b)∈⟨hm_weak_impl'_rel⟩nres_rel"
shows "(a,b)∈⟨hm_impl1_rel⟩nres_rel"
using assms hm_impl1_relI
apply (auto simp: pw_le_iff pw_leof_iff refine_pw_simps in_br_conv nres_rel_def)
apply blast
done
lemma hm_exch_op'_refine: "(hm_exch_op', hm_exch_op) ∈ hm_impl1_rel → nat_rel → nat_rel → ⟨hm_impl1_rel⟩nres_rel"
apply (intro fun_relI hm_impl1_nres_relI[OF hm_exch_op_invar])
unfolding hm_exch_op'_def hm_exch_op_def
apply (auto simp: pw_le_iff refine_pw_simps nres_rel_def
hm_impl1_rel_def in_br_conv split: prod.splits)
apply (auto simp: hm_impl1_α_def)
unfolding hm_impl1_rel_defs
apply auto
done
term hm_index_op
definition "hm_index_op' ≡ λ(pq,ml) k.
do {
ASSERT (hm_impl1_invar (pq,ml) ∧ heapmap_α (hm_impl1_α (pq,ml)) k ≠ None ∧ k∈set pq);
i ← mop_list_index pq k;
RETURN (i+1)
}"
lemma hm_index_op'_refine: "(hm_index_op',hm_index_op)
∈ hm_impl1_rel → nat_rel → ⟨nat_rel⟩nres_rel"
apply (intro fun_relI nres_relI)
unfolding hm_index_op'_def hm_index_op_def hm_impl1_rel_defs
apply (auto simp: pw_le_iff refine_pw_simps heapmap_α_def split: if_split_asm)
done
definition hm_update_op' where
"hm_update_op' ≡ λ(pq,ml) i v. do {
ASSERT (hm_valid (hm_impl1_α (pq,ml)) i ∧ hm_impl1_invar (pq,ml));
k ← mop_list_get pq (i - 1);
ml ← mop_list_set ml k v;
RETURN (pq, ml)
}"
lemma hm_update_op'_refine: "(hm_update_op', hm_update_op) ∈ hm_impl1_rel → nat_rel → Id → ⟨hm_impl1_rel⟩nres_rel"
apply (intro fun_relI hm_impl1_nres_relI[OF hm_update_op_invar])
unfolding hm_update_op'_def hm_update_op_def
apply (auto simp: pw_le_iff refine_pw_simps nres_rel_def
hm_impl1_rel_def in_br_conv split: prod.splits)
apply (auto simp: hm_impl1_α_def)
unfolding hm_impl1_rel_defs
apply (auto simp: subset_code(1))
done
term hm_butlast_op
lemma hm_butlast_op_invar: "hm_butlast_op hm ≤⇩n SPEC hmr_invar"
unfolding hm_butlast_op_def h.butlast_op_def
apply refine_vcg
apply (clarsimp_all simp: hmr_rel_defs map_butlast distinct_butlast)
apply safe
apply (auto simp: in_set_conv_nth nth_butlast) []
apply (metis Suc_pred len_greater_imp_nonempty length_greater_0_conv less_antisym)
apply (auto dest: in_set_butlastD) []
apply (metis One_nat_def append_butlast_last_id distinct_butlast last_conv_nth not_distinct_conv_prefix)
done
definition hm_butlast_op' where
"hm_butlast_op' ≡ λ(pq,ml). do {
ASSERT (hmr_invar (hm_impl1_α (pq,ml)));
pq ← mop_list_butlast pq;
RETURN (pq,ml)
}"
lemma set_butlast_distinct_conv:
"⟦distinct l⟧ ⟹ set (butlast l) = set l - {last l}"
by (cases l rule: rev_cases; auto)
lemma hm_butlast_op'_refine: "(hm_butlast_op', hm_butlast_op) ∈ hm_impl1_rel → ⟨hm_impl1_rel⟩nres_rel"
apply (intro fun_relI hm_impl1_nres_relI[OF hm_butlast_op_invar])
unfolding hm_butlast_op'_def hm_butlast_op_def
apply (auto simp: pw_le_iff refine_pw_simps nres_rel_def
hm_impl1_rel_def in_br_conv split: prod.splits)
apply (auto simp: hm_impl1_α_def)
unfolding hm_impl1_rel_defs
apply (auto simp: restrict_map_def) []
defer
apply (auto dest: in_set_butlastD) []
apply (auto intro!: ext
simp: hmr_invar_def set_butlast_distinct_conv last_conv_nth
dest: in_set_butlastD) []
done
definition hm_append_op'
where "hm_append_op' ≡ λ(pq,ml) k v. do {
ASSERT (k ∉ set pq ∧ k<maxsize);
ASSERT (hm_impl1_invar (pq,ml));
pq ← mop_list_append pq k;
ml ← (if length ml = 0 then mop_list_replicate maxsize v else RETURN ml);
ml ← mop_list_set ml k v;
RETURN (pq,ml)
}"
lemma hm_append_op'_refine: "(uncurry2 hm_append_op', uncurry2 hm_append_op)
∈ [λ((hm,k),v). k<maxsize]⇩f (hm_impl1_rel ×⇩r nat_rel) ×⇩r Id → ⟨hm_impl1_rel⟩nres_rel"
apply (intro frefI hm_impl1_nres_relI[OF hm_append_op_invar])
unfolding hm_append_op'_def hm_append_op_def
apply (auto simp: pw_le_iff refine_pw_simps nres_rel_def
hm_impl1_rel_def in_br_conv split: prod.splits)
unfolding hm_impl1_rel_defs
apply (auto simp: restrict_map_def hmr_invar_def split: prod.splits if_split_asm)
done
definition "hm_impl2_rel ≡ prod_assn (ial_assn maxsize id_assn) (array_assn id_assn)"
definition "hm_impl_rel ≡ hr_comp hm_impl2_rel hm_impl1_rel"
lemmas [fcomp_norm_unfold] = hm_impl_rel_def[symmetric]
subsection ‹Implement Basic Operations›
lemma param_parent: "(efficient_nat_div2,h.parent) ∈ Id → Id"
by (intro fun_relI) (simp add: h.parent_def)
lemmas [sepref_import_param] = param_parent
sepref_register h.parent
lemma param_left: "(h.left,h.left) ∈ Id → Id" by simp
lemmas [sepref_import_param] = param_left
sepref_register h.left
lemma param_right: "(h.right,h.right) ∈ Id → Id" by simp
lemmas [sepref_import_param] = param_right
sepref_register h.right
abbreviation (input) "prio_rel ≡ (Id::('p×'p) set)"
lemma param_prio_le: "((≤), (≤)) ∈ prio_rel → prio_rel → bool_rel" by simp
lemmas [sepref_import_param] = param_prio_le
lemma param_prio_lt: "((<), (<)) ∈ prio_rel → prio_rel → bool_rel" by simp
lemmas [sepref_import_param] = param_prio_lt
abbreviation "I_HM_UNF ≡ TYPE(nat list × 'v list)"
sepref_definition hm_length_impl is "RETURN o hm_length'" :: "hm_impl2_rel⇧k→⇩anat_assn"
unfolding hm_length'_def hm_impl2_rel_def
by sepref
lemmas [sepref_fr_rules] = hm_length_impl.refine[FCOMP hm_length'_refine]
sepref_register "hm_length::(nat,'v) ahm ⇒ _"
sepref_definition hm_key_of_op_impl is "uncurry hm_key_of_op'" :: "hm_impl2_rel⇧k*⇩anat_assn⇧k →⇩anat_assn"
unfolding hm_key_of_op'_def hm_impl2_rel_def
by sepref
lemmas [sepref_fr_rules] = hm_key_of_op_impl.refine[FCOMP hm_key_of_op'_refine]
sepref_register "hm_key_of_op::(nat,'v) ahm ⇒ _"
context
notes [id_rules] = itypeI[Pure.of maxsize "TYPE(nat)"]
notes [sepref_import_param] = IdI[of maxsize]
begin
sepref_definition hm_lookup_impl is "uncurry hm_lookup_op'" :: "(hm_impl2_rel⇧k*⇩anat_assn⇧k →⇩aoption_assn id_assn)"
unfolding hm_lookup_op'_def hm_impl2_rel_def
by sepref
lemmas [sepref_fr_rules] =
hm_lookup_impl.refine[FCOMP hm_lookup_op'_refine]
sepref_register "hm_lookup::(nat,'v) ahm ⇒ _"
sepref_definition hm_exch_op_impl is "uncurry2 hm_exch_op'" :: "hm_impl2_rel⇧d*⇩anat_assn⇧k*⇩anat_assn⇧k →⇩a hm_impl2_rel"
unfolding hm_exch_op'_def hm_impl2_rel_def
by sepref
lemmas [sepref_fr_rules] = hm_exch_op_impl.refine[FCOMP hm_exch_op'_refine]
sepref_register "hm_exch_op::(nat,'v) ahm ⇒ _"
sepref_definition hm_index_op_impl is "uncurry hm_index_op'" :: "hm_impl2_rel⇧k*⇩aid_assn⇧k →⇩a id_assn"
unfolding hm_index_op'_def hm_impl2_rel_def
by sepref
lemmas [sepref_fr_rules] = hm_index_op_impl.refine[FCOMP hm_index_op'_refine]
sepref_register "hm_index_op::(nat,'v) ahm ⇒ _"
sepref_definition hm_update_op_impl is "uncurry2 hm_update_op'" :: "hm_impl2_rel⇧d*⇩aid_assn⇧k*⇩aid_assn⇧k →⇩a hm_impl2_rel"
unfolding hm_update_op'_def hm_impl2_rel_def
by sepref
lemmas [sepref_fr_rules] = hm_update_op_impl.refine[FCOMP hm_update_op'_refine]
sepref_register "hm_update_op::(nat,'v) ahm ⇒ _"
sepref_definition hm_butlast_op_impl is "hm_butlast_op'" :: "hm_impl2_rel⇧d →⇩a hm_impl2_rel"
unfolding hm_butlast_op'_def hm_impl2_rel_def by sepref
lemmas [sepref_fr_rules] = hm_butlast_op_impl.refine[FCOMP hm_butlast_op'_refine]
sepref_register "hm_butlast_op::(nat,'v) ahm ⇒ _"
sepref_definition hm_append_op_impl is "uncurry2 hm_append_op'" :: "hm_impl2_rel⇧d *⇩a id_assn⇧k *⇩a id_assn⇧k →⇩a hm_impl2_rel"
unfolding hm_append_op'_def hm_impl2_rel_def
apply (rewrite array_fold_custom_replicate)
by sepref
lemmas [sepref_fr_rules] = hm_append_op_impl.refine[FCOMP hm_append_op'_refine]
sepref_register "hm_append_op::(nat,'v) ahm ⇒ _"
subsection ‹Auxiliary Operations›
lemmas [intf_of_assn] = intf_of_assnI[where R="hm_impl_rel :: (nat,'v) ahm ⇒ _" and 'a="'v i_hma"]
sepref_definition hm_valid_impl is "uncurry (RETURN oo hm_valid)" :: "hm_impl_rel⇧k*⇩anat_assn⇧k →⇩a bool_assn "
unfolding hm_valid_def[abs_def]
by sepref
lemmas [sepref_fr_rules] = hm_valid_impl.refine
sepref_register "hm_valid::(nat,'v) ahm ⇒ _"
definition "hm_the_lookup_op' hm k ≡ do {
let (pq,ml) = hm;
ASSERT (heapmap_α (hm_impl1_α hm) k ≠ None ∧ hm_impl1_invar hm);
v ← mop_list_get ml k;
RETURN v
}"
lemma hm_the_lookup_op'_refine:
"(hm_the_lookup_op', hm_the_lookup_op) ∈ hm_impl1_rel → nat_rel → ⟨Id⟩nres_rel"
apply (intro fun_relI nres_relI)
unfolding hm_the_lookup_op'_def hm_the_lookup_op_def
apply refine_vcg
apply (auto simp: hm_impl1_rel_defs heapmap_α_def hmr_invar_def split: if_split_asm)
done
sepref_definition hm_the_lookup_op_impl is "uncurry hm_the_lookup_op'" :: "hm_impl2_rel⇧k*⇩aid_assn⇧k →⇩aid_assn"
unfolding hm_the_lookup_op'_def[abs_def] hm_impl2_rel_def
by sepref
lemmas hm_the_lookup_op_impl[sepref_fr_rules] = hm_the_lookup_op_impl.refine[FCOMP hm_the_lookup_op'_refine]
sepref_register "hm_the_lookup_op::(nat,'v) ahm ⇒ _"
sepref_definition hm_val_of_op_impl is "uncurry hm_val_of_op" :: "hm_impl_rel⇧k*⇩aid_assn⇧k →⇩a id_assn"
unfolding hm_val_of_op_def by sepref
lemmas [sepref_fr_rules] = hm_val_of_op_impl.refine
sepref_register "hm_val_of_op::(nat,'v) ahm ⇒ _"
sepref_definition hm_prio_of_op_impl is "uncurry (PR_CONST hm_prio_of_op)" :: "hm_impl_rel⇧k*⇩aid_assn⇧k →⇩a id_assn"
unfolding hm_prio_of_op_def[abs_def] PR_CONST_def by sepref
lemmas [sepref_fr_rules] = hm_prio_of_op_impl.refine
sepref_register "PR_CONST hm_prio_of_op::(nat,'v) ahm ⇒ _"
lemma [def_pat_rules]: "hmstruct.hm_prio_of_op$prio ≡ PR_CONST hm_prio_of_op"
by simp
text ‹No code theorem preparation, as we define optimized version later›
sepref_definition (no_prep_code) hm_swim_op_impl is "uncurry (PR_CONST hm_swim_op)" :: "hm_impl_rel⇧d*⇩anat_assn⇧k →⇩a hm_impl_rel"
unfolding hm_swim_op_def[abs_def] PR_CONST_def
using [[goals_limit = 1]]
by sepref
lemmas [sepref_fr_rules] = hm_swim_op_impl.refine
sepref_register "PR_CONST hm_swim_op::(nat,'v) ahm ⇒ _"
lemma [def_pat_rules]: "hmstruct.hm_swim_op$prio ≡ PR_CONST hm_swim_op" by simp
text ‹No code theorem preparation, as we define optimized version later›
sepref_definition (no_prep_code) hm_sink_op_impl is "uncurry (PR_CONST hm_sink_op)" :: "hm_impl_rel⇧d*⇩anat_assn⇧k →⇩a hm_impl_rel"
unfolding hm_sink_op_def[abs_def] PR_CONST_def
by sepref
lemmas [sepref_fr_rules] = hm_sink_op_impl.refine
sepref_register "PR_CONST hm_sink_op::(nat,'v) ahm ⇒ _"
lemma [def_pat_rules]: "hmstruct.hm_sink_op$prio ≡ PR_CONST hm_sink_op" by simp
sepref_definition hm_repair_op_impl is "uncurry (PR_CONST hm_repair_op)" :: "hm_impl_rel⇧d*⇩anat_assn⇧k →⇩a hm_impl_rel"
unfolding hm_repair_op_def[abs_def] PR_CONST_def
by sepref
lemmas [sepref_fr_rules] = hm_repair_op_impl.refine
sepref_register "PR_CONST hm_repair_op::(nat,'v) ahm ⇒ _"
lemma [def_pat_rules]: "hmstruct.hm_repair_op$prio ≡ PR_CONST hm_repair_op" by simp
subsection ‹Interface Operations›
definition hm_rel_np where
"hm_rel_np ≡ hr_comp hm_impl_rel heapmap_rel"
lemmas [fcomp_norm_unfold] = hm_rel_np_def[symmetric]
definition hm_rel where
"hm_rel K V ≡ hr_comp hm_rel_np (⟨the_pure K,the_pure V⟩map_rel)"
lemmas [fcomp_norm_unfold] = hm_rel_def[symmetric]
lemmas [intf_of_assn] = intf_of_assnI[where R="hm_rel K V" and 'a="('kk,'vv) i_map" for K V]
lemma hm_rel_id_conv: "hm_rel id_assn id_assn = hm_rel_np"
unfolding hm_rel_def by simp
subsubsection ‹Synthesis›
definition op_hm_empty_sz :: "nat ⇒ 'kk⇀'vv"
where [simp]: "op_hm_empty_sz sz ≡ op_map_empty"
sepref_register "PR_CONST (op_hm_empty_sz maxsize)" :: "('k,'v) i_map"
lemma [def_pat_rules]: "op_hm_empty_sz$maxsize ≡ UNPROTECT (op_hm_empty_sz maxsize)" by simp
lemma hm_fold_custom_empty_sz:
"op_map_empty = op_hm_empty_sz sz"
"Map.empty = op_hm_empty_sz sz"
by auto
sepref_definition hm_empty_op_impl is "uncurry0 hm_empty_op'" :: "unit_assn⇧k →⇩a hm_impl2_rel"
unfolding hm_empty_op'_def hm_impl2_rel_def array.fold_custom_empty
by sepref
sepref_definition hm_insert_op_impl is "uncurry2 hm_insert_op" :: "[λ((k,_),_). k<maxsize]⇩a id_assn⇧k*⇩aid_assn⇧k*⇩ahm_impl_rel⇧d → hm_impl_rel"
unfolding hm_insert_op_def
by sepref
sepref_definition hm_is_empty_op_impl is "hm_is_empty_op" :: "hm_impl_rel⇧k →⇩a bool_assn"
unfolding hm_is_empty_op_def
by sepref
sepref_definition hm_lookup_op_impl is "uncurry hm_lookup_op" :: "id_assn⇧k*⇩ahm_impl_rel⇧k →⇩a option_assn id_assn"
unfolding hm_lookup_op_def by sepref
sepref_definition hm_contains_key_impl is "uncurry hm_contains_key_op'" :: "id_assn⇧k*⇩ahm_impl2_rel⇧k →⇩a bool_assn"
unfolding hm_contains_key_op'_def hm_impl2_rel_def
by sepref
sepref_definition hm_decrease_key_op_impl is "uncurry2 hm_decrease_key_op" :: "id_assn⇧k*⇩aid_assn⇧k*⇩ahm_impl_rel⇧d →⇩a hm_impl_rel"
unfolding hm_decrease_key_op_def by sepref
sepref_definition hm_increase_key_op_impl is "uncurry2 hm_increase_key_op" :: "id_assn⇧k*⇩aid_assn⇧k*⇩ahm_impl_rel⇧d →⇩a hm_impl_rel"
unfolding hm_increase_key_op_def by sepref
sepref_definition hm_change_key_op_impl is "uncurry2 hm_change_key_op" :: "id_assn⇧k*⇩aid_assn⇧k*⇩ahm_impl_rel⇧d →⇩a hm_impl_rel"
unfolding hm_change_key_op_def by sepref
sepref_definition hm_pop_min_op_impl is hm_pop_min_op :: "hm_impl_rel⇧d →⇩a prod_assn (prod_assn nat_assn id_assn) hm_impl_rel "
unfolding hm_pop_min_op_def[abs_def]
by sepref
sepref_definition hm_remove_op_impl is "uncurry hm_remove_op" :: "id_assn⇧k *⇩a hm_impl_rel⇧d →⇩a hm_impl_rel"
unfolding hm_remove_op_def[abs_def] by sepref
sepref_definition hm_peek_min_op_impl is "hm_peek_min_op" :: "hm_impl_rel⇧k →⇩a prod_assn nat_assn id_assn"
unfolding hm_peek_min_op_def[abs_def] hm_kv_of_op_def
by sepref
subsubsection ‹Setup of Refinements›
sepref_decl_impl (no_register) hm_empty:
hm_empty_op_impl.refine[FCOMP hm_empty_op'_refine, FCOMP hm_empty_aref] .
context fixes K assumes "IS_BELOW_ID K" begin
lemmas mop_map_update_new_fref' = mop_map_update_new.fref[of K]
lemmas op_map_update_fref' = op_map_update.fref[of K]
end
sepref_decl_impl (ismop) hm_insert: hm_insert_op_impl.refine[FCOMP hm_insert_op_aref]
uses mop_map_update_new_fref'
unfolding IS_BELOW_ID_def
apply (parametricity; auto simp: single_valued_below_Id)
done
sepref_decl_impl hm_is_empty: hm_is_empty_op_impl.refine[FCOMP hm_is_empty_op_aref] .
sepref_decl_impl hm_lookup: hm_lookup_op_impl.refine[FCOMP hm_lookup_op_aref] .
sepref_decl_impl hm_contains_key:
hm_contains_key_impl.refine[FCOMP hm_contains_key_op'_refine, FCOMP hm_contains_key_op_aref]
.
sepref_decl_impl (ismop) hm_decrease_key: hm_decrease_key_op_impl.refine[FCOMP hm_decrease_key_op_aref] .
sepref_decl_impl (ismop) hm_increase_key: hm_increase_key_op_impl.refine[FCOMP hm_increase_key_op_aref] .
sepref_decl_impl (ismop) hm_change_key: hm_change_key_op_impl.refine[FCOMP hm_change_key_op_aref] .
sepref_decl_impl (ismop) hm_remove: hm_remove_op_impl.refine[FCOMP hm_remove_op_aref] .
sepref_decl_impl (ismop) hm_pop_min: hm_pop_min_op_impl.refine[FCOMP hm_pop_min_op_aref] .
sepref_decl_impl (ismop) hm_peek_min: hm_peek_min_op_impl.refine[FCOMP hm_peek_min_op_aref] .
sepref_definition hm_upd_op_impl is "uncurry2 (RETURN ooo op_map_update)" :: "[λ((k,_),_). k<maxsize]⇩a id_assn⇧k *⇩a id_assn⇧k *⇩a (hm_rel id_assn id_assn)⇧d → hm_rel id_assn id_assn"
unfolding op_pm_set_gen_impl by sepref
sepref_decl_impl hm_upd_op_impl.refine[unfolded hm_rel_id_conv] uses op_map_update_fref'
unfolding IS_BELOW_ID_def
apply (parametricity; auto simp: single_valued_below_Id)
done
end
end
interpretation hm: map_custom_empty "PR_CONST (op_hm_empty_sz maxsize)"
apply unfold_locales by simp
lemma op_hm_empty_sz_hnr[sepref_fr_rules]:
"(uncurry0 (hm_empty_op_impl maxsize), uncurry0 (RETURN (PR_CONST (op_hm_empty_sz maxsize)))) ∈ unit_assn⇧k →⇩a hm_rel maxsize prio K V"
using hm_empty_hnr by simp
subsection ‹Manual fine-tuning of code-lemmas›
context
notes [simp del] = CNV_def efficient_nat_div2
begin
lemma nested_case_bind:
"(case p of (a,b) ⇒ bind (case a of (a1,a2) ⇒ m a b a1 a2) (f a b))
= (case p of ((a1,a2),b) ⇒ bind (m (a1,a2) b a1 a2) (f (a1,a2) b))"
"(case p of (a,b) ⇒ bind (case b of (b1,b2) ⇒ m a b b1 b2) (f a b))
= (case p of (a,b1,b2) ⇒ bind (m a (b1,b2) b1 b2) (f a (b1,b2)))"
by (simp_all split: prod.splits)
lemma it_case: "(case p of (a,b) ⇒ f p a b) = (case p of (a,b) ⇒ f (a,b) a b)"
by (auto split: prod.split)
lemma c2l: "(case p of (a,b) ⇒ bind (m a b) (f a b)) =
do { let (a,b) = p; bind (m a b) (f a b)}" by simp
lemma bind_Let: "do { x ← do { let y = v; (f y :: 'a Heap)}; g x } = do { let y=v; x ← f y; g x }" by auto
lemma bind_case: "do { x ← (case y of (a,b) ⇒ f a b); (g x :: 'a Heap) } = do { let (a,b) = y; x ← f a b; g x }"
by (auto split: prod.splits)
lemma bind_case_mvup: "do { x ← f; case y of (a,b) ⇒ g a b x }
= do { let (a,b) = y; x ← f; (g a b x :: 'a Heap) }"
by (auto split: prod.splits)
lemma if_case_mvup: "(if b then case p of (x1,x2) ⇒ f x1 x2 else e)
= (case p of (x1,x2) ⇒ if b then f x1 x2 else e)" by auto
lemma nested_case: "(case p of (a,b) ⇒ (case p of (c,d) ⇒ f a b c d)) =
(case p of (a,b) ⇒ f a b a b)"
by (auto split: prod.split)
lemma split_prod_bound: "(λp. f p) = (λ(a,b). f (a,b))" by auto
lemma bpc_conv: "do { (a,b) ← (m::(_*_) Heap); f a b } = do {
ab ← (m);
f (fst ab) (snd ab)
}"
apply (subst (2) split_prod_bound)
by simp
lemma it_case_pp: "(case p of ((p1,p2)) ⇒ case p of ((p1',p2')) ⇒ f p1 p2 p1' p2')
= (case p of ((p1,p2)) ⇒ f p1 p2 p1 p2)"
by (auto split: prod.split)
lemma it_case_ppp: "(case p of ((p1,p2),p3) ⇒ case p of ((p1',p2'),p3') ⇒ f p1 p2 p3 p1' p2' p3')
= (case p of ((p1,p2),p3) ⇒ f p1 p2 p3 p1 p2 p3)"
by (auto split: prod.split)
lemma it_case_pppp: "(case a1 of
(((a, b), c), d) ⇒
case a1 of
(((a', b'), c'), d') ⇒ f a b c d a' b' c' d') =
(case a1 of
(((a, b), c), d) ⇒ f a b c d a b c d)"
by (auto split: prod.splits)
private lemmas inlines = hm_append_op_impl_def ial_append_def
marl_length_def marl_append_def hm_length_impl_def ial_length_def
hm_valid_impl_def hm_prio_of_op_impl_def hm_val_of_op_impl_def hm_key_of_op_impl_def
ial_get_def hm_the_lookup_op_impl_def heap_array_set_def marl_get_def
it_case_ppp it_case_pppp bind_case bind_case_mvup nested_case if_case_mvup
it_case_pp
schematic_goal [code]: "hm_insert_op_impl maxsize prio hm k v = ?f"
unfolding hm_insert_op_impl_def
apply (rule CNV_eqD)
apply (simp add: inlines cong: if_cong)
by (rule CNV_I)
schematic_goal "hm_swim_op_impl prio hm i ≡ ?f"
unfolding hm_swim_op_impl_def
apply (rule eq_reflection)
apply (rule CNV_eqD)
apply (simp add: inlines efficient_nat_div2
cong: if_cong)
by (rule CNV_I)
lemma hm_swim_op_impl_code[code]: "hm_swim_op_impl prio hm i ≡ ccpo.fixp (fun_lub Heap_lub) (fun_ord Heap_ord)
(λcf (a1, a2).
case a1 of
((a1b, a2b), a2a) ⇒
case a1b of
(a, b) ⇒ do {
let d2 = efficient_nat_div2 a2;
if 0 < d2 ∧ d2 ≤ b
then do {
x ← (case a1b of (a, n) ⇒ Array.nth a) (d2 - Suc 0);
x ← Array.nth a2a x;
xa ← (case a1b of (a, n) ⇒ Array.nth a) (a2 - Suc 0);
xa ← Array.nth a2a xa;
if prio x ≤ prio xa then return a1
else do {
x'g ← hm_exch_op_impl a1 a2 (d2);
cf (x'g, d2)
}
}
else return a1
})
(hm, i)"
unfolding hm_swim_op_impl_def
apply (rule eq_reflection)
apply (simp add: inlines efficient_nat_div2 Let_def
cong: if_cong)
done
prepare_code_thms hm_swim_op_impl_code
schematic_goal hm_sink_opt_impl_code[code]: "hm_sink_op_impl prio hm i ≡ ?f"
unfolding hm_sink_op_impl_def
apply (rule eq_reflection)
apply (rule CNV_eqD)
apply (simp add: inlines
cong: if_cong)
by (rule CNV_I)
prepare_code_thms hm_sink_opt_impl_code
export_code hm_swim_op_impl in SML_imp module_name Test
schematic_goal hm_change_key_opt_impl_code[code]: "
hm_change_key_op_impl prio k v hm ≡ ?f"
unfolding hm_change_key_op_impl_def
apply (rule eq_reflection)
apply (rule CNV_eqD)
apply (simp add: inlines hm_contains_key_impl_def ial_contains_def
hm_change_key_op_impl_def hm_index_op_impl_def hm_update_op_impl_def
ial_index_def
cong: if_cong split: prod.splits)
oops
schematic_goal hm_change_key_opt_impl_code[code]: "
hm_change_key_op_impl prio k v hm ≡ case hm of (((a, b), ba), x2) ⇒
(do {
x ← Array.nth ba k;
xa ← Array.nth a x;
xa ← Array.upd xa v x2;
hm_repair_op_impl prio (((a, b), ba), xa) (Suc x)
})"
unfolding hm_change_key_op_impl_def
apply (rule eq_reflection)
apply (simp add: inlines hm_contains_key_impl_def ial_contains_def
hm_change_key_op_impl_def hm_index_op_impl_def hm_update_op_impl_def
ial_index_def
cong: if_cong split: prod.splits)
done
schematic_goal hm_set_opt_impl_code[code]: "hm_upd_op_impl maxsize prio hm k v ≡ ?f"
unfolding hm_upd_op_impl_def
apply (rule eq_reflection)
apply (rule CNV_eqD)
apply (simp add: inlines hm_contains_key_impl_def ial_contains_def
hm_change_key_op_impl_def hm_index_op_impl_def hm_update_op_impl_def
ial_index_def
cong: if_cong)
by (rule CNV_I)
schematic_goal hm_pop_min_opt_impl_code[code]: "hm_pop_min_op_impl prio hm ≡ ?f"
unfolding hm_pop_min_op_impl_def
apply (rule eq_reflection)
apply (rule CNV_eqD)
apply (simp add: inlines hm_contains_key_impl_def ial_contains_def
hm_change_key_op_impl_def hm_index_op_impl_def hm_update_op_impl_def
hm_butlast_op_impl_def ial_butlast_def
ial_index_def
cong: if_cong)
by (rule CNV_I)
end
export_code
hm_empty_op_impl
hm_insert_op_impl
hm_is_empty_op_impl
hm_lookup_op_impl
hm_contains_key_impl
hm_decrease_key_op_impl
hm_increase_key_op_impl
hm_change_key_op_impl
hm_upd_op_impl
hm_pop_min_op_impl
hm_remove_op_impl
hm_peek_min_op_impl
checking SML_imp
end
Theory IICF_Matrix
section ‹Matrices›
theory IICF_Matrix
imports "../../Sepref"
begin
subsection ‹Relator and Interface›
definition [to_relAPP]: "mtx_rel A ≡ nat_rel ×⇩r nat_rel → A"
lemma mtx_rel_id[simp]: "⟨Id⟩mtx_rel = Id" unfolding mtx_rel_def by auto
type_synonym 'a mtx = "nat×nat ⇒ 'a"
sepref_decl_intf 'a i_mtx is "nat×nat ⇒ 'a"
lemma [synth_rules]: "INTF_OF_REL A TYPE('a) ⟹ INTF_OF_REL (⟨A⟩mtx_rel) TYPE('a i_mtx)"
by simp
subsection ‹Operations›
definition op_mtx_new :: "'a mtx ⇒ 'a mtx" where [simp]: "op_mtx_new c ≡ c"
sepref_decl_op (no_def) mtx_new: "op_mtx_new" :: "(nat_rel×⇩rnat_rel → A) → ⟨A⟩mtx_rel"
apply (rule fref_ncI) unfolding op_mtx_new_def[abs_def] mtx_rel_def
by parametricity
lemma mtx_init_adhoc_frame_match_rule[sepref_frame_match_rules]:
"hn_val (nat_rel×⇩rnat_rel → A) x y ⟹⇩t hn_val (nat_rel×⇩rnat_rel → the_pure (pure A)) x y"
by simp
definition op_mtx_copy :: "'a mtx ⇒ 'a mtx" where [simp]: "op_mtx_copy c ≡ c"
sepref_decl_op (no_def) mtx_copy: "op_mtx_copy" :: "⟨A⟩mtx_rel → ⟨A⟩mtx_rel" .
sepref_decl_op mtx_get: "λ(c::'a mtx) ij. c ij" :: "⟨A⟩mtx_rel → (nat_rel×⇩rnat_rel) → A"
apply (rule fref_ncI) unfolding mtx_rel_def
by parametricity
sepref_decl_op mtx_set: "fun_upd::'a mtx ⇒ _" :: "⟨A⟩mtx_rel → (nat_rel×⇩rnat_rel) → A → ⟨A⟩mtx_rel"
apply (rule fref_ncI)
unfolding mtx_rel_def
proof goal_cases case 1
have [param]: "((=), (=)) ∈ nat_rel ×⇩r nat_rel → nat_rel ×⇩r nat_rel → bool_rel" by simp
show ?case by parametricity
qed
definition mtx_nonzero :: "_ mtx ⇒ (nat×nat) set" where "mtx_nonzero m ≡ {(i,j). m (i,j)≠0}"
sepref_decl_op mtx_nonzero: "mtx_nonzero" :: "⟨A⟩mtx_rel → ⟨nat_rel×⇩rnat_rel⟩set_rel"
where "IS_ID (A::(_×(_::zero)) set)"
proof goal_cases
case 1
assume "IS_ID A"
hence U: "A=Id" by (simp only: IS_ID_def)
have [param]: "((=),(=))∈A→A→bool_rel" using U by simp
show ?case
apply (rule fref_ncI)
unfolding mtx_rel_def
apply parametricity
unfolding U by simp_all
qed
subsection ‹Patterns›
lemma pat_amtx_get: "c$e≡op_mtx_get$'c$'e" by simp
lemma pat_amtx_set: "fun_upd$c$e$v≡op_mtx_set$'c$'e$'v" by simp
lemmas amtx_pats[pat_rules] = pat_amtx_get pat_amtx_set
subsection ‹Pointwise Operations›
subsubsection ‹Auxiliary Definitions and Lemmas›
locale pointwise_op =
fixes f :: "'p ⇒ 's ⇒ 's"
fixes q :: "'s ⇒ 'p ⇒ 'a"
assumes upd_indep1[simp, intro]: "p≠p' ⟹ q (f p s) p' = q s p'"
assumes upd_indep2[simp, intro]: "p≠p' ⟹ q (f p (f p' s)) p = q (f p s) p"
begin
lemma pointwise_upd_fold: "distinct ps ⟹
q (fold f ps s) p = (if p∈set ps then q (f p s) p else q s p)"
by (induction ps arbitrary: s) auto
end
lemma pointwise_fun_fold:
fixes f :: "'a ⇒ ('a ⇒ 'b) ⇒ ('a ⇒ 'b)"
fixes s :: "'a ⇒ 'b"
assumes indep1: "⋀x x' s. x ≠ x' ⟹ f x s x' = s x'"
assumes indep2: "⋀x x' s. x ≠ x' ⟹ f x (f x' s) x = f x s x"
assumes [simp]: "distinct xs"
shows "fold f xs s x = (if x ∈ set xs then f x s x else s x)"
proof -
interpret pointwise_op f "λs. s"
by unfold_locales fact+
show ?thesis
using pointwise_upd_fold[of xs s x]
by auto
qed
lemma list_prod_divmod_eq: "List.product [0..<M] [0..<N] = map (λi. (i div N, i mod N)) [0..<N*M]"
proof -
have [simp]: "i < m*n ⟹ (i::nat) div n < m" for i m n
by (metis mult.commute div_eq_0_iff div_mult2_eq gr_implies_not_zero mult_not_zero)
have [simp]: "i<N*M ⟹ N>0 ∧ M>0" for i
by (cases N; cases M; auto)
show ?thesis
by (rule nth_equalityI) (auto simp add: product_nth algebra_simps)
qed
lemma nfoldli_prod_divmod_conv:
"nfoldli (List.product [0..<N] [0..<M]) ctd (λ(i,j). f i j) = nfoldli [0..<N*M] ctd (λi. f (i div M) (i mod M))"
apply (intro ext)
apply (subst list_prod_divmod_eq)
apply (simp add: nfoldli_map)
apply (fo_rule cong)+
apply (auto simp: algebra_simps)
done
lemma nfoldli_prod_divmod_conv':
"nfoldli [0..<M] ctd (λi. nfoldli [0..<N] ctd (f i)) = nfoldli [0..<N*M] ctd (λi. f (i div N) (i mod N))"
apply (intro ext)
apply (subst nfoldli_nfoldli_prod_conv)
by (simp add: nfoldli_prod_divmod_conv algebra_simps)
lemma foldli_prod_divmod_conv':
"foldli [0..<M] ctd (λi. foldli [0..<N] ctd (f i)) = foldli [0..<N*M] ctd (λi. f (i div N) (i mod N))"
(is "?lhs=?rhs")
proof -
have "RETURN (?lhs s) = RETURN (?rhs s)" for s
apply (subst foldli_eq_nfoldli)+
apply (subst nfoldli_prod_divmod_conv')
..
thus ?thesis by auto
qed
lemma fold_prod_divmod_conv': "fold (λi. fold (f i) [0..<N]) [0..<M] = fold (λi. f (i div N) (i mod N)) [0..<N*M]"
using foldli_prod_divmod_conv'[of M "λ_. True" N f, THEN fun_cong]
apply (intro ext)
apply (simp add: foldli_foldl foldl_conv_fold)
done
lemma mtx_nonzero_cases[consumes 0, case_names nonzero zero]:
obtains "(i,j)∈mtx_nonzero m" | "m (i,j) = 0"
by (auto simp: mtx_nonzero_def)
subsubsection ‹Unary Pointwise›
definition mtx_pointwise_unop :: "(nat×nat ⇒ 'a ⇒ 'a) ⇒ 'a mtx ⇒ 'a mtx" where
"mtx_pointwise_unop f m ≡ λ(i,j). f (i,j) (m(i,j))"
context fixes f :: "nat×nat ⇒ 'a ⇒ 'a" begin
sepref_register "PR_CONST (mtx_pointwise_unop f)" :: "'a i_mtx ⇒ 'a i_mtx"
lemma [def_pat_rules]: "mtx_pointwise_unop$f ≡ UNPROTECT (mtx_pointwise_unop f)" by simp
end
locale mtx_pointwise_unop_loc =
fixes N :: nat and M :: nat
fixes f :: "(nat×nat) ⇒ 'a::{zero} ⇒ 'a"
assumes pres_zero[simp]: "⟦ i≥N ∨ j≥M ⟧ ⟹ f (i,j) 0 = 0"
begin
definition "opr_fold_impl ≡ fold (λi. fold (λj m. m( (i,j) := f (i,j) (m(i,j)) )) [0..<M]) [0..<N]"
lemma opr_fold_impl_eq:
assumes "mtx_nonzero m ⊆ {0..<N}×{0..<M}"
shows "mtx_pointwise_unop f m = opr_fold_impl m"
apply (rule ext)
unfolding opr_fold_impl_def
apply (simp add: fold_fold_prod_conv)
apply (subst pointwise_fun_fold)
apply (auto simp: mtx_pointwise_unop_def distinct_product) [3]
apply clarsimp
subgoal for a b
apply (cases a b m rule: mtx_nonzero_cases)
using assms
apply (auto simp: mtx_pointwise_unop_def)
done
done
lemma opr_fold_impl_refine: "(opr_fold_impl, mtx_pointwise_unop f) ∈ [λm. mtx_nonzero m ⊆ {0..<N}×{0..<M}]⇩f Id → Id"
apply (rule frefI)
using opr_fold_impl_eq
by auto
end
locale mtx_pointwise_unop_gen_impl = mtx_pointwise_unop_loc +
fixes assn :: "'a mtx ⇒ 'i ⇒ assn"
fixes A :: "'a ⇒ 'ai ⇒ assn"
fixes get_impl :: "'i ⇒ nat×nat ⇒ 'ai Heap"
fixes set_impl :: "'i ⇒ nat×nat ⇒ 'ai ⇒ 'i Heap"
fixes fi :: "nat×nat ⇒ 'ai ⇒ 'ai Heap"
assumes assn_range: "rdomp assn m ⟹ mtx_nonzero m ⊆ {0..<N}×{0..<M}"
assumes get_impl_hnr:
"(uncurry get_impl,uncurry (RETURN oo op_mtx_get)) ∈ assn⇧k *⇩a (prod_assn (nbn_assn N) (nbn_assn M))⇧k →⇩a A"
assumes set_impl_hnr:
"(uncurry2 set_impl,uncurry2 (RETURN ooo op_mtx_set)) ∈ assn⇧d *⇩a (prod_assn (nbn_assn N) (nbn_assn M))⇧k *⇩a A⇧k →⇩a assn"
assumes fi_hnr:
"(uncurry fi,uncurry (RETURN oo f)) ∈ (prod_assn nat_assn nat_assn)⇧k *⇩a A⇧k →⇩a A"
begin
lemma this_loc: "mtx_pointwise_unop_gen_impl N M f assn A get_impl set_impl fi"
by unfold_locales
context
notes [[sepref_register_adhoc f N M]]
notes [intf_of_assn] = intf_of_assnI[where R=assn and 'a="'a i_mtx"]
notes [sepref_import_param] = IdI[of N] IdI[of M]
notes [sepref_fr_rules] = get_impl_hnr set_impl_hnr fi_hnr
begin
sepref_thm opr_fold_impl1 is "RETURN o opr_fold_impl" :: "assn⇧d →⇩a assn"
unfolding opr_fold_impl_def
supply [[goals_limit = 1]]
by sepref
end
concrete_definition (in -) mtx_pointwise_unnop_fold_impl1 uses mtx_pointwise_unop_gen_impl.opr_fold_impl1.refine_raw
prepare_code_thms (in -) mtx_pointwise_unnop_fold_impl1_def
lemma op_hnr[sepref_fr_rules]: "(mtx_pointwise_unnop_fold_impl1 N M get_impl set_impl fi, RETURN ∘ PR_CONST (mtx_pointwise_unop f)) ∈ assn⇧d →⇩a assn"
unfolding PR_CONST_def
apply (rule hfref_weaken_pre'[OF _ mtx_pointwise_unnop_fold_impl1.refine[OF this_loc,FCOMP opr_fold_impl_refine]])
by (simp add: assn_range)
end
subsubsection ‹Binary Pointwise›
definition mtx_pointwise_binop :: "('a ⇒ 'a ⇒ 'a) ⇒ 'a mtx ⇒ 'a mtx ⇒ 'a mtx" where
"mtx_pointwise_binop f m n ≡ λ(i,j). f (m(i,j)) (n(i,j))"
context fixes f :: "'a ⇒ 'a ⇒ 'a" begin
sepref_register "PR_CONST (mtx_pointwise_binop f)" :: "'a i_mtx ⇒ 'a i_mtx ⇒ 'a i_mtx"
lemma [def_pat_rules]: "mtx_pointwise_binop$f ≡ UNPROTECT (mtx_pointwise_binop f)" by simp
end
locale mtx_pointwise_binop_loc =
fixes N :: nat and M :: nat
fixes f :: "'a::{zero} ⇒ 'a ⇒ 'a"
assumes pres_zero[simp]: "f 0 0 = 0"
begin
definition "opr_fold_impl m n ≡ fold (λi. fold (λj m. m( (i,j) := f (m(i,j)) (n(i,j)) )) [0..<M]) [0..<N] m"
lemma opr_fold_impl_eq:
assumes "mtx_nonzero m ⊆ {0..<N}×{0..<M}"
assumes "mtx_nonzero n ⊆ {0..<N}×{0..<M}"
shows "mtx_pointwise_binop f m n = opr_fold_impl m n"
apply (rule ext)
unfolding opr_fold_impl_def
apply (simp add: fold_fold_prod_conv)
apply (subst pointwise_fun_fold)
apply (auto simp: mtx_pointwise_binop_def distinct_product) [3]
apply clarsimp
subgoal for a b
apply (cases a b m rule: mtx_nonzero_cases; cases a b n rule: mtx_nonzero_cases)
using assms
apply (auto simp: mtx_pointwise_binop_def)
done
done
lemma opr_fold_impl_refine: "(uncurry opr_fold_impl, uncurry (mtx_pointwise_binop f)) ∈ [λ(m,n). mtx_nonzero m ⊆ {0..<N}×{0..<M} ∧ mtx_nonzero n ⊆ {0..<N}×{0..<M}]⇩f Id×⇩rId → Id"
apply (rule frefI)
using opr_fold_impl_eq
by auto
end
locale mtx_pointwise_binop_gen_impl = mtx_pointwise_binop_loc +
fixes assn :: "'a mtx ⇒ 'i ⇒ assn"
fixes A :: "'a ⇒ 'ai ⇒ assn"
fixes get_impl :: "'i ⇒ nat×nat ⇒ 'ai Heap"
fixes set_impl :: "'i ⇒ nat×nat ⇒ 'ai ⇒ 'i Heap"
fixes fi :: "'ai ⇒ 'ai ⇒ 'ai Heap"
assumes assn_range: "rdomp assn m ⟹ mtx_nonzero m ⊆ {0..<N}×{0..<M}"
assumes get_impl_hnr:
"(uncurry get_impl,uncurry (RETURN oo op_mtx_get)) ∈ assn⇧k *⇩a (prod_assn (nbn_assn N) (nbn_assn M))⇧k →⇩a A"
assumes set_impl_hnr:
"(uncurry2 set_impl,uncurry2 (RETURN ooo op_mtx_set)) ∈ assn⇧d *⇩a (prod_assn (nbn_assn N) (nbn_assn M))⇧k *⇩a A⇧k →⇩a assn"
assumes fi_hnr:
"(uncurry fi,uncurry (RETURN oo f)) ∈ A⇧k *⇩a A⇧k →⇩a A"
begin
lemma this_loc: "mtx_pointwise_binop_gen_impl N M f assn A get_impl set_impl fi"
by unfold_locales
context
notes [[sepref_register_adhoc f N M]]
notes [intf_of_assn] = intf_of_assnI[where R=assn and 'a="'a i_mtx"]
notes [sepref_import_param] = IdI[of N] IdI[of M]
notes [sepref_fr_rules] = get_impl_hnr set_impl_hnr fi_hnr
begin
sepref_thm opr_fold_impl1 is "uncurry (RETURN oo opr_fold_impl)" :: "assn⇧d*⇩aassn⇧k →⇩a assn"
unfolding opr_fold_impl_def[abs_def]
by sepref
end
concrete_definition (in -) mtx_pointwise_binop_fold_impl1
uses mtx_pointwise_binop_gen_impl.opr_fold_impl1.refine_raw is "(uncurry ?f,_)∈_"
prepare_code_thms (in -) mtx_pointwise_binop_fold_impl1_def
lemma op_hnr[sepref_fr_rules]: "(uncurry (mtx_pointwise_binop_fold_impl1 N M get_impl set_impl fi), uncurry (RETURN oo PR_CONST (mtx_pointwise_binop f))) ∈ assn⇧d *⇩a assn⇧k →⇩a assn"
unfolding PR_CONST_def
apply (rule hfref_weaken_pre'[OF _ mtx_pointwise_binop_fold_impl1.refine[OF this_loc,FCOMP opr_fold_impl_refine]])
apply (auto dest: assn_range)
done
end
subsubsection ‹Compare Pointwise›
definition mtx_pointwise_cmpop :: "('a ⇒ 'a ⇒ bool) ⇒ ('a ⇒ 'a ⇒ bool) ⇒ 'a mtx ⇒ 'a mtx ⇒ bool" where
"mtx_pointwise_cmpop f g m n ≡ (∀i j. f (m(i,j)) (n(i,j))) ∧ (∃i j. g (m(i,j)) (n(i,j)))"
context fixes f g :: "'a ⇒ 'a ⇒ bool" begin
sepref_register "PR_CONST (mtx_pointwise_cmpop f g)" :: "'a i_mtx ⇒ 'a i_mtx ⇒ bool"
lemma [def_pat_rules]: "mtx_pointwise_cmpop$f$g ≡ UNPROTECT (mtx_pointwise_cmpop f g)" by simp
end
lemma mtx_nonzeroD:
"⟦¬i<N; mtx_nonzero m ⊆ {0..<N}×{0..<M}⟧ ⟹ m(i,j) = 0"
"⟦¬j<M; mtx_nonzero m ⊆ {0..<N}×{0..<M}⟧ ⟹ m(i,j) = 0"
by (auto simp: mtx_nonzero_def)
locale mtx_pointwise_cmpop_loc =
fixes N :: nat and M :: nat
fixes f g :: "'a::{zero} ⇒ 'a ⇒ bool"
assumes pres_zero[simp]: "f 0 0 = True" "g 0 0 = False"
begin
definition "opr_fold_impl m n ≡ do {
s ← nfoldli (List.product [0..<N] [0..<M]) (λs. s≠2) (λ(i,j) s. do {
if f (m(i,j)) (n(i,j)) then
if s=0 then
if g (m(i,j)) (n(i,j)) then RETURN 1 else RETURN s
else
RETURN s
else RETURN 2
}) (0::nat);
RETURN (s=1)
}"
lemma opr_fold_impl_eq:
assumes "mtx_nonzero m ⊆ {0..<N}×{0..<M}"
assumes "mtx_nonzero n ⊆ {0..<N}×{0..<M}"
shows "opr_fold_impl m n ≤ RETURN (mtx_pointwise_cmpop f g m n)"
proof -
have "(∀i<N. ∀j<M. f (m (i, j)) (n (i, j))) ⟹ f (m (i, j)) (n (i, j))" for i j
apply (cases "i<N"; cases "j<M")
using assms by (auto simp: mtx_nonzeroD)
moreover have "g (m (i, j)) (n (i, j)) ⟹ (∃i<N. ∃j<M. g (m (i, j)) (n (i, j)))" for i j
apply (cases "i<N"; cases "j<M")
using assms by (auto simp: mtx_nonzeroD)
ultimately have EQ: "mtx_pointwise_cmpop f g m n
⟷ (∀i<N. ∀j<M. f (m(i,j)) (n(i,j))) ∧ (∃i<N. ∃j<M. g (m(i,j)) (n(i,j)))"
unfolding mtx_pointwise_cmpop_def by meson
have aux: "List.product [0..<N] [0..<M] = l1 @ (i, j) # l2 ⟹ i<N ∧ j<M" for l1 i j l2
proof -
assume "List.product [0..<N] [0..<M] = l1 @ (i, j) # l2"
hence "(i,j)∈set (List.product [0..<N] [0..<M])" by simp
thus ?thesis by simp
qed
show ?thesis
unfolding opr_fold_impl_def
apply (refine_vcg
nfoldli_rule[where I="λl1 _ s.
if s=2 then ∃i<N. ∃j<M. ¬f (m(i,j)) (n(i,j))
else (
(s=0 ∨ s=1) ∧
(∀(i,j)∈set l1. f (m(i,j)) (n(i,j))) ∧
(s=1 ⟷ (∃(i,j)∈set l1. g (m(i,j)) (n(i,j))))
)"]
)
apply (vc_solve dest: aux solve: asm_rl simp: EQ) [6]
apply (fastforce simp: EQ)
done
qed
lemma opr_fold_impl_refine:
"(uncurry opr_fold_impl, uncurry (RETURN oo mtx_pointwise_cmpop f g)) ∈ [λ(m,n). mtx_nonzero m ⊆ {0..<N}×{0..<M} ∧ mtx_nonzero n ⊆ {0..<N}×{0..<M}]⇩f Id×⇩rId → ⟨bool_rel⟩nres_rel"
apply (rule frefI)
using opr_fold_impl_eq
by (auto intro: nres_relI)
end
locale mtx_pointwise_cmpop_gen_impl = mtx_pointwise_cmpop_loc +
fixes assn :: "'a mtx ⇒ 'i ⇒ assn"
fixes A :: "'a ⇒ 'ai ⇒ assn"
fixes get_impl :: "'i ⇒ nat×nat ⇒ 'ai Heap"
fixes fi :: "'ai ⇒ 'ai ⇒ bool Heap"
fixes gi :: "'ai ⇒ 'ai ⇒ bool Heap"
assumes assn_range: "rdomp assn m ⟹ mtx_nonzero m ⊆ {0..<N}×{0..<M}"
assumes get_impl_hnr:
"(uncurry get_impl,uncurry (RETURN oo op_mtx_get)) ∈ assn⇧k *⇩a (prod_assn (nbn_assn N) (nbn_assn M))⇧k →⇩a A"
assumes fi_hnr:
"(uncurry fi,uncurry (RETURN oo f)) ∈ A⇧k *⇩a A⇧k →⇩a bool_assn"
assumes gi_hnr:
"(uncurry gi,uncurry (RETURN oo g)) ∈ A⇧k *⇩a A⇧k →⇩a bool_assn"
begin
lemma this_loc: "mtx_pointwise_cmpop_gen_impl N M f g assn A get_impl fi gi"
by unfold_locales
context
notes [[sepref_register_adhoc f g N M]]
notes [intf_of_assn] = intf_of_assnI[where R=assn and 'a="'a i_mtx"]
notes [sepref_import_param] = IdI[of N] IdI[of M]
notes [sepref_fr_rules] = get_impl_hnr fi_hnr gi_hnr
begin
sepref_thm opr_fold_impl1 is "uncurry opr_fold_impl" :: "assn⇧d*⇩aassn⇧k →⇩a bool_assn"
unfolding opr_fold_impl_def[abs_def] nfoldli_nfoldli_prod_conv[symmetric]
by sepref
end
concrete_definition (in -) mtx_pointwise_cmpop_fold_impl1
uses mtx_pointwise_cmpop_gen_impl.opr_fold_impl1.refine_raw is "(uncurry ?f,_)∈_"
prepare_code_thms (in -) mtx_pointwise_cmpop_fold_impl1_def
lemma op_hnr[sepref_fr_rules]: "(uncurry (mtx_pointwise_cmpop_fold_impl1 N M get_impl fi gi), uncurry (RETURN oo PR_CONST (mtx_pointwise_cmpop f g))) ∈ assn⇧d *⇩a assn⇧k →⇩a bool_assn"
unfolding PR_CONST_def
apply (rule hfref_weaken_pre'[OF _ mtx_pointwise_cmpop_fold_impl1.refine[OF this_loc,FCOMP opr_fold_impl_refine]])
apply (auto dest: assn_range)
done
end
end
Theory IICF_Array_Matrix
section ‹Matrices by Array (Row-Major)›
theory IICF_Array_Matrix
imports "../Intf/IICF_Matrix" Separation_Logic_Imperative_HOL.Array_Blit
begin
definition "is_amtx N M c mtx ≡ ∃⇩Al. mtx ↦⇩a l * ↑(
length l = N*M
∧ (∀i<N. ∀j<M. l!(i*M+j) = c (i,j))
∧ (∀i j. (i≥N ∨ j≥M) ⟶ c (i,j) = 0))"
lemma is_amtx_precise[safe_constraint_rules]: "precise (is_amtx N M)"
apply rule
unfolding is_amtx_def
apply clarsimp
apply prec_extract_eqs
apply (rule ext)
apply (rename_tac x)
apply (case_tac x; simp)
apply (rename_tac i j)
apply (case_tac "i<N"; case_tac "j<M"; simp)
done
lemma is_amtx_bounded:
shows "rdomp (is_amtx N M) m ⟹ mtx_nonzero m ⊆ {0..<N}×{0..<M}"
unfolding rdomp_def
apply (clarsimp simp: mtx_nonzero_def is_amtx_def)
by (meson not_less)
definition "mtx_tabulate N M c ≡ do {
m ← Array.new (N*M) 0;
(_,_,m) ← imp_for' 0 (N*M) (λk (i,j,m). do {
Array.upd k (c (i,j)) m;
let j=j+1;
if j<M then return (i,j,m)
else return (i+1,0,m)
}) (0,0,m);
return m
}"
definition "amtx_copy ≡ array_copy"
definition "amtx_dflt N M v ≡ Array.make (N*M) (λi. v)"
definition "mtx_get M mtx e ≡ Array.nth mtx (fst e * M + snd e)"
definition "mtx_set M mtx e v ≡ Array.upd (fst e * M + snd e) v mtx"
lemma mtx_idx_valid[simp]: "⟦i < (N::nat); j < M⟧ ⟹ i * M + j < N * M"
by (rule mlex_bound)
lemma mtx_idx_unique_conv[simp]:
fixes M :: nat
assumes "j<M" "j'<M"
shows "(i * M + j = i' * M + j') ⟷ (i=i' ∧ j=j')"
using assms
apply auto
subgoal
by (metis add_right_cancel div_if div_mult_self3 linorder_neqE_nat not_less0)
subgoal
using ‹⟦j < M; j' < M; i * M + j = i' * M + j'⟧ ⟹ i = i'› by auto
done
lemma mtx_tabulate_rl[sep_heap_rules]:
assumes NONZ: "mtx_nonzero c ⊆ {0..<N}×{0..<M}"
shows "<emp> mtx_tabulate N M c <IICF_Array_Matrix.is_amtx N M c>"
proof (cases "M=0")
case True thus ?thesis
unfolding mtx_tabulate_def
using mtx_nonzeroD[OF _ NONZ]
by (sep_auto simp: is_amtx_def)
next
case False hence M_POS: "0<M" by auto
show ?thesis
unfolding mtx_tabulate_def
apply (sep_auto
decon:
imp_for'_rule[where
I="λk (i,j,mi). ∃⇩Am. mi ↦⇩a m
* ↑( k=i*M+j ∧ j<M ∧ k≤N*M ∧ length m = N*M )
* ↑( ∀i'<i. ∀j<M. m!(i'*M+j) = c (i',j) )
* ↑( ∀j'<j. m!(i*M+j') = c (i,j') )
"]
simp: nth_list_update M_POS dest: Suc_lessI
)
unfolding is_amtx_def
using mtx_nonzeroD[OF _ NONZ]
apply sep_auto
by (metis add.right_neutral M_POS mtx_idx_unique_conv)
qed
lemma mtx_copy_rl[sep_heap_rules]:
"<is_amtx N M c mtx> amtx_copy mtx <λr. is_amtx N M c mtx * is_amtx N M c r>"
by (sep_auto simp: amtx_copy_def is_amtx_def)
definition "PRES_ZERO_UNIQUE A ≡ (A``{0}={0} ∧ A¯``{0} = {0})"
lemma IS_ID_imp_PRES_ZERO_UNIQUE[constraint_rules]: "IS_ID A ⟹ PRES_ZERO_UNIQUE A"
unfolding IS_ID_def PRES_ZERO_UNIQUE_def by auto
definition op_amtx_dfltNxM :: "nat ⇒ nat ⇒ 'a::zero ⇒ nat×nat⇒'a" where
[simp]: "op_amtx_dfltNxM N M v ≡ λ(i,j). if i<N ∧ j<M then v else 0"
context fixes N M::nat begin
sepref_decl_op (no_def) op_amtx_dfltNxM: "op_amtx_dfltNxM N M" :: "A → ⟨A⟩mtx_rel"
where "CONSTRAINT PRES_ZERO_UNIQUE A"
apply (rule fref_ncI) unfolding op_amtx_dfltNxM_def[abs_def] mtx_rel_def
apply parametricity
by (auto simp add: PRES_ZERO_UNIQUE_def)
end
lemma mtx_dflt_rl[sep_heap_rules]: "<emp> amtx_dflt N M k <is_amtx N M (op_amtx_dfltNxM N M k)>"
by (sep_auto simp: amtx_dflt_def is_amtx_def)
lemma mtx_get_rl[sep_heap_rules]: "⟦i<N; j<M ⟧ ⟹ <is_amtx N M c mtx> mtx_get M mtx (i,j) <λr. is_amtx N M c mtx * ↑(r = c (i,j))>"
by (sep_auto simp: mtx_get_def is_amtx_def)
lemma mtx_set_rl[sep_heap_rules]: "⟦i<N; j<M ⟧
⟹ <is_amtx N M c mtx> mtx_set M mtx (i,j) v <λr. is_amtx N M (c((i,j) := v)) r>"
by (sep_auto simp: mtx_set_def is_amtx_def nth_list_update)
definition "amtx_assn N M A ≡ hr_comp (is_amtx N M) (⟨the_pure A⟩mtx_rel)"
lemmas [fcomp_norm_unfold] = amtx_assn_def[symmetric]
lemmas [safe_constraint_rules] = CN_FALSEI[of is_pure "amtx_assn N M A" for N M A]
lemma [intf_of_assn]: "intf_of_assn A TYPE('a) ⟹ intf_of_assn (amtx_assn N M A) TYPE('a i_mtx)"
by simp
abbreviation "asmtx_assn N A ≡ amtx_assn N N A"
lemma mtx_rel_pres_zero:
assumes "PRES_ZERO_UNIQUE A"
assumes "(m,m')∈⟨A⟩mtx_rel"
shows "m ij = 0 ⟷ m' ij = 0"
using assms
apply1 (clarsimp simp: IS_PURE_def PRES_ZERO_UNIQUE_def is_pure_conv mtx_rel_def)
apply (drule fun_relD) applyS (rule IdI[of ij]) applyS auto
done
lemma amtx_assn_bounded:
assumes "CONSTRAINT (IS_PURE PRES_ZERO_UNIQUE) A"
shows "rdomp (amtx_assn N M A) m ⟹ mtx_nonzero m ⊆ {0..<N}×{0..<M}"
apply (clarsimp simp: mtx_nonzero_def amtx_assn_def rdomp_hrcomp_conv)
apply (drule is_amtx_bounded)
using assms
by (fastforce simp: IS_PURE_def is_pure_conv mtx_rel_pres_zero[symmetric] mtx_nonzero_def)
lemma mtx_tabulate_aref:
"(mtx_tabulate N M, RETURN o op_mtx_new)
∈ [λc. mtx_nonzero c ⊆ {0..<N}×{0..<M}]⇩a id_assn⇧k → IICF_Array_Matrix.is_amtx N M"
by sepref_to_hoare sep_auto
lemma mtx_copy_aref:
"(amtx_copy, RETURN o op_mtx_copy) ∈ (is_amtx N M)⇧k →⇩a is_amtx N M"
apply rule apply rule
apply (sep_auto simp: pure_def)
done
lemma mtx_nonzero_bid_eq:
assumes "R⊆Id"
assumes "(a, a') ∈ Id → R"
shows "mtx_nonzero a = mtx_nonzero a'"
using assms
apply (clarsimp simp: mtx_nonzero_def)
apply (metis fun_relE2 pair_in_Id_conv subsetCE)
done
lemma mtx_nonzero_zu_eq:
assumes "PRES_ZERO_UNIQUE R"
assumes "(a, a') ∈ Id → R"
shows "mtx_nonzero a = mtx_nonzero a'"
using assms
apply (clarsimp simp: mtx_nonzero_def PRES_ZERO_UNIQUE_def)
by (metis (no_types, hide_lams) IdI Image_singleton_iff converse_iff singletonD tagged_fun_relD_none)
lemma op_mtx_new_fref':
"CONSTRAINT PRES_ZERO_UNIQUE A ⟹ (RETURN ∘ op_mtx_new, RETURN ∘ op_mtx_new) ∈ (nat_rel ×⇩r nat_rel → A) →⇩f ⟨⟨A⟩mtx_rel⟩nres_rel"
by (rule op_mtx_new.fref)
sepref_decl_impl (no_register) amtx_new_by_tab: mtx_tabulate_aref uses op_mtx_new_fref'
by (auto simp: mtx_nonzero_zu_eq)
sepref_decl_impl amtx_copy: mtx_copy_aref .
definition [simp]: "op_amtx_new (N::nat) (M::nat) ≡ op_mtx_new"
lemma amtx_fold_custom_new:
"op_mtx_new ≡ op_amtx_new N M"
"mop_mtx_new ≡ λc. RETURN (op_amtx_new N M c)"
by (auto simp: mop_mtx_new_alt[abs_def])
context fixes N M :: nat begin
sepref_register "PR_CONST (op_amtx_new N M)" :: "(nat × nat ⇒ 'a) ⇒ 'a i_mtx"
end
lemma amtx_new_hnr[sepref_fr_rules]:
fixes A :: "'a::zero ⇒ 'b::{zero,heap} ⇒ assn"
shows "CONSTRAINT (IS_PURE PRES_ZERO_UNIQUE) A ⟹
(mtx_tabulate N M, (RETURN ∘ PR_CONST (op_amtx_new N M)))
∈ [λx. mtx_nonzero x ⊆ {0..<N} × {0..<M}]⇩a (pure (nat_rel ×⇩r nat_rel → the_pure A))⇧k → amtx_assn N M A"
using amtx_new_by_tab_hnr[of A N M] by simp
lemma [def_pat_rules]: "op_amtx_new$N$M ≡ UNPROTECT (op_amtx_new N M)" by simp
context fixes N M :: nat notes [param] = IdI[of N] IdI[of M] begin
lemma mtx_dflt_aref:
"(amtx_dflt N M, RETURN o PR_CONST (op_amtx_dfltNxM N M)) ∈ id_assn⇧k →⇩a is_amtx N M"
apply rule apply rule
apply (sep_auto simp: pure_def)
done
sepref_decl_impl amtx_dflt: mtx_dflt_aref .
lemma amtx_get_aref:
"(uncurry (mtx_get M), uncurry (RETURN oo op_mtx_get)) ∈ [λ(_,(i,j)). i<N ∧ j<M]⇩a (is_amtx N M)⇧k *⇩a (prod_assn nat_assn nat_assn)⇧k → id_assn"
apply rule apply rule
apply (sep_auto simp: pure_def)
done
sepref_decl_impl amtx_get: amtx_get_aref .
lemma amtx_set_aref: "(uncurry2 (mtx_set M), uncurry2 (RETURN ooo op_mtx_set))
∈ [λ((_,(i,j)),_). i<N ∧ j<M]⇩a (is_amtx N M)⇧d *⇩a (prod_assn nat_assn nat_assn)⇧k *⇩a id_assn⇧k → is_amtx N M"
apply rule apply (rule hn_refine_preI) apply rule
apply (sep_auto simp: pure_def hn_ctxt_def invalid_assn_def)
done
sepref_decl_impl amtx_set: amtx_set_aref .
lemma amtx_get_aref':
"(uncurry (mtx_get M), uncurry (RETURN oo op_mtx_get)) ∈ (is_amtx N M)⇧k *⇩a (prod_assn (pure (nbn_rel N)) (pure (nbn_rel M)))⇧k →⇩a id_assn"
apply rule apply rule
apply (sep_auto simp: pure_def IS_PURE_def IS_ID_def)
done
sepref_decl_impl amtx_get': amtx_get_aref' .
lemma amtx_set_aref': "(uncurry2 (mtx_set M), uncurry2 (RETURN ooo op_mtx_set))
∈ (is_amtx N M)⇧d *⇩a (prod_assn (pure (nbn_rel N)) (pure (nbn_rel M)))⇧k *⇩a id_assn⇧k →⇩a is_amtx N M"
apply rule apply (rule hn_refine_preI) apply rule
apply (sep_auto simp: pure_def hn_ctxt_def invalid_assn_def IS_PURE_def IS_ID_def)
done
sepref_decl_impl amtx_set': amtx_set_aref' .
end
subsection ‹Pointwise Operations›
context
fixes M N :: nat
begin
sepref_decl_op amtx_lin_get: "λf i. op_mtx_get f (i div M, i mod M)" :: "⟨A⟩mtx_rel → nat_rel → A"
unfolding op_mtx_get_def mtx_rel_def
by (rule frefI) (parametricity; simp)
sepref_decl_op amtx_lin_set: "λf i x. op_mtx_set f (i div M, i mod M) x" :: "⟨A⟩mtx_rel → nat_rel → A → ⟨A⟩mtx_rel"
unfolding op_mtx_set_def mtx_rel_def
apply (rule frefI) apply parametricity by simp_all
lemma op_amtx_lin_get_aref: "(uncurry Array.nth, uncurry (RETURN oo PR_CONST op_amtx_lin_get)) ∈ [λ(_,i). i<N*M]⇩a (is_amtx N M)⇧k *⇩a nat_assn⇧k → id_assn"
apply sepref_to_hoare
unfolding is_amtx_def
apply sep_auto
apply (metis mult.commute div_eq_0_iff div_mult2_eq div_mult_mod_eq mod_less_divisor mult_is_0 not_less0)
done
sepref_decl_impl amtx_lin_get: op_amtx_lin_get_aref by auto
lemma op_amtx_lin_set_aref: "(uncurry2 (λm i x. Array.upd i x m), uncurry2 (RETURN ooo PR_CONST op_amtx_lin_set)) ∈ [λ((_,i),_). i<N*M]⇩a (is_amtx N M)⇧d *⇩a nat_assn⇧k *⇩a id_assn⇧k → is_amtx N M"
proof -
have [simp]: "i < N * M ⟹ ¬(M ≤ i mod M)" for i
by (cases "N = 0 ∨ M = 0") (auto simp add: not_le)
have [simp]: "i < N * M ⟹ ¬(N ≤ i div M)" for i
apply (cases "N = 0 ∨ M = 0")
apply (auto simp add: not_le)
apply (metis mult.commute div_eq_0_iff div_mult2_eq neq0_conv)
done
show ?thesis
apply sepref_to_hoare
unfolding is_amtx_def
by (sep_auto simp: nth_list_update)
qed
sepref_decl_impl amtx_lin_set: op_amtx_lin_set_aref by auto
end
lemma amtx_fold_lin_get: "m (i div M, i mod M) = op_amtx_lin_get M m i" by simp
lemma amtx_fold_lin_set: "m ((i div M, i mod M) := x) = op_amtx_lin_set M m i x" by simp
locale amtx_pointwise_unop_impl = mtx_pointwise_unop_loc +
fixes A :: "'a ⇒ 'ai::{zero,heap} ⇒ assn"
fixes fi :: "nat×nat ⇒ 'ai ⇒ 'ai Heap"
assumes fi_hnr:
"(uncurry fi,uncurry (RETURN oo f)) ∈ (prod_assn nat_assn nat_assn)⇧k *⇩a A⇧k →⇩a A"
begin
lemma this_loc: "amtx_pointwise_unop_impl N M f A fi" by unfold_locales
context
assumes PURE: "CONSTRAINT (IS_PURE PRES_ZERO_UNIQUE) A"
begin
context
notes [[sepref_register_adhoc f N M]]
notes [sepref_import_param] = IdI[of N] IdI[of M]
notes [sepref_fr_rules] = fi_hnr
notes [safe_constraint_rules] = PURE
notes [simp] = algebra_simps
begin
sepref_thm opr_fold_impl1 is "RETURN o opr_fold_impl" :: "(amtx_assn N M A)⇧d →⇩a amtx_assn N M A"
unfolding opr_fold_impl_def fold_prod_divmod_conv'
apply (rewrite amtx_fold_lin_set)
apply (rewrite in "f _ ⌑" amtx_fold_lin_get)
by sepref
end
end
concrete_definition (in -) amtx_pointwise_unnop_fold_impl1 uses amtx_pointwise_unop_impl.opr_fold_impl1.refine_raw
prepare_code_thms (in -) amtx_pointwise_unnop_fold_impl1_def
lemma op_hnr[sepref_fr_rules]:
assumes PURE: "CONSTRAINT (IS_PURE PRES_ZERO_UNIQUE) A"
shows "(amtx_pointwise_unnop_fold_impl1 N M fi, RETURN ∘ PR_CONST (mtx_pointwise_unop f)) ∈ (amtx_assn N M A)⇧d →⇩a amtx_assn N M A"
unfolding PR_CONST_def
apply (rule hfref_weaken_pre'[OF _ amtx_pointwise_unnop_fold_impl1.refine[OF this_loc PURE,FCOMP opr_fold_impl_refine]])
by (simp add: amtx_assn_bounded[OF PURE])
end
locale amtx_pointwise_binop_impl = mtx_pointwise_binop_loc +
fixes A :: "'a ⇒ 'ai::{zero,heap} ⇒ assn"
fixes fi :: "'ai ⇒ 'ai ⇒ 'ai Heap"
assumes fi_hnr: "(uncurry fi,uncurry (RETURN oo f)) ∈ A⇧k *⇩a A⇧k →⇩a A"
begin
lemma this_loc: "amtx_pointwise_binop_impl f A fi"
by unfold_locales
context
notes [[sepref_register_adhoc f N M]]
notes [sepref_import_param] = IdI[of N] IdI[of M]
notes [sepref_fr_rules] = fi_hnr
assumes PURE[safe_constraint_rules]: "CONSTRAINT (IS_PURE PRES_ZERO_UNIQUE) A"
notes [simp] = algebra_simps
begin
sepref_thm opr_fold_impl1 is "uncurry (RETURN oo opr_fold_impl)" :: "(amtx_assn N M A)⇧d*⇩a(amtx_assn N M A)⇧k →⇩a amtx_assn N M A"
unfolding opr_fold_impl_def[abs_def] fold_prod_divmod_conv'
apply (rewrite amtx_fold_lin_set)
apply (rewrite in "f ⌑ _" amtx_fold_lin_get)
apply (rewrite in "f _ ⌑" amtx_fold_lin_get)
by sepref
end
concrete_definition (in -) amtx_pointwise_binop_fold_impl1 for fi N M
uses amtx_pointwise_binop_impl.opr_fold_impl1.refine_raw is "(uncurry ?f,_)∈_"
prepare_code_thms (in -) amtx_pointwise_binop_fold_impl1_def
lemma op_hnr[sepref_fr_rules]:
assumes PURE: "CONSTRAINT (IS_PURE PRES_ZERO_UNIQUE) A"
shows "(uncurry (amtx_pointwise_binop_fold_impl1 fi N M), uncurry (RETURN oo PR_CONST (mtx_pointwise_binop f))) ∈ (amtx_assn N M A)⇧d *⇩a (amtx_assn N M A)⇧k →⇩a amtx_assn N M A"
unfolding PR_CONST_def
apply (rule hfref_weaken_pre'[OF _ amtx_pointwise_binop_fold_impl1.refine[OF this_loc PURE,FCOMP opr_fold_impl_refine]])
apply (auto dest: amtx_assn_bounded[OF PURE])
done
end
locale amtx_pointwise_cmpop_impl = mtx_pointwise_cmpop_loc +
fixes A :: "'a ⇒ 'ai::{zero,heap} ⇒ assn"
fixes fi :: "'ai ⇒ 'ai ⇒ bool Heap"
fixes gi :: "'ai ⇒ 'ai ⇒ bool Heap"
assumes fi_hnr:
"(uncurry fi,uncurry (RETURN oo f)) ∈ A⇧k *⇩a A⇧k →⇩a bool_assn"
assumes gi_hnr:
"(uncurry gi,uncurry (RETURN oo g)) ∈ A⇧k *⇩a A⇧k →⇩a bool_assn"
begin
lemma this_loc: "amtx_pointwise_cmpop_impl f g A fi gi"
by unfold_locales
context
notes [[sepref_register_adhoc f g N M]]
notes [sepref_import_param] = IdI[of N] IdI[of M]
notes [sepref_fr_rules] = fi_hnr gi_hnr
assumes PURE[safe_constraint_rules]: "CONSTRAINT (IS_PURE PRES_ZERO_UNIQUE) A"
begin
sepref_thm opr_fold_impl1 is "uncurry opr_fold_impl" :: "(amtx_assn N M A)⇧d*⇩a(amtx_assn N M A)⇧k →⇩a bool_assn"
unfolding opr_fold_impl_def[abs_def] nfoldli_prod_divmod_conv
apply (rewrite in "f ⌑ _" amtx_fold_lin_get)
apply (rewrite in "f _ ⌑" amtx_fold_lin_get)
apply (rewrite in "g ⌑ _" amtx_fold_lin_get)
apply (rewrite in "g _ ⌑" amtx_fold_lin_get)
by sepref
end
concrete_definition (in -) amtx_pointwise_cmpop_fold_impl1 for N M fi gi
uses amtx_pointwise_cmpop_impl.opr_fold_impl1.refine_raw is "(uncurry ?f,_)∈_"
prepare_code_thms (in -) amtx_pointwise_cmpop_fold_impl1_def
lemma op_hnr[sepref_fr_rules]:
assumes PURE: "CONSTRAINT (IS_PURE PRES_ZERO_UNIQUE) A"
shows "(uncurry (amtx_pointwise_cmpop_fold_impl1 N M fi gi), uncurry (RETURN oo PR_CONST (mtx_pointwise_cmpop f g))) ∈ (amtx_assn N M A)⇧d *⇩a (amtx_assn N M A)⇧k →⇩a bool_assn"
unfolding PR_CONST_def
apply (rule hfref_weaken_pre'[OF _ amtx_pointwise_cmpop_fold_impl1.refine[OF this_loc PURE,FCOMP opr_fold_impl_refine]])
apply (auto dest: amtx_assn_bounded[OF PURE])
done
end
subsection ‹Regression Test and Usage Example›
context begin
text ‹To work with a matrix, the dimension should be fixed in a context›
context
fixes N M :: nat
notes [[sepref_register_adhoc N M]]
notes [sepref_import_param] = IdI[of N] IdI[of M]
fixes dummy:: "'a::{times,zero,heap}"
begin
text ‹First, we implement scalar multiplication with destructive update
of the matrix:›
private definition scmul :: "'a ⇒ 'a mtx ⇒ 'a mtx nres" where
"scmul x m ≡ nfoldli [0..<N] (λ_. True) (λi m.
nfoldli [0..<M] (λ_. True) (λj m. do {
let mij = m(i,j);
RETURN (m((i,j) := x * mij))
}
) m
) m"
text ‹After declaration of an implementation for multiplication,
refinement is straightforward. Note that we use the fixed @{term N} in
the refinement assertions.›
private lemma times_param: "((*),(*)::'a⇒_) ∈ Id → Id → Id" by simp
context
notes [sepref_import_param] = times_param
begin
sepref_definition scmul_impl
is "uncurry scmul" :: "(id_assn⇧k *⇩a (amtx_assn N M id_assn)⇧d →⇩a amtx_assn N M id_assn)"
unfolding scmul_def[abs_def]
by sepref
end
text ‹Initialization with default value›
private definition "init_test ≡ do {
let m = op_amtx_dfltNxM 10 5 (0::nat);
RETURN (m(1,2))
}"
private sepref_definition init_test_impl is "uncurry0 init_test" :: "unit_assn⇧k→⇩anat_assn"
unfolding init_test_def
by sepref
text ‹Initialization from function diagonal is more complicated:
First, we have to define the function as a new constant›
qualified definition "diagonalN k ≡ λ(i,j). if i=j ∧ j<N then k else 0"
text ‹If it carries implicit parameters, we have to wrap it into a @{term PR_CONST} tag:›
private sepref_register "PR_CONST diagonalN"
private lemma [def_pat_rules]: "IICF_Array_Matrix.diagonalN$N ≡ UNPROTECT diagonalN" by simp
text ‹Then, we have to implement the constant, where the result assertion must be for a
pure function. Note that, due to technical reasons, we need the ‹the_pure› in the function type,
and the refinement rule to be parameterized over an assertion variable (here ‹A›).
Of course, you can constrain ‹A› further, e.g., @{term "CONSTRAINT (IS_PURE IS_ID) (A::int ⇒ int ⇒ assn)"}
›
private lemma diagonalN_hnr[sepref_fr_rules]:
assumes "CONSTRAINT (IS_PURE PRES_ZERO_UNIQUE) A"
shows "(return o diagonalN, RETURN o (PR_CONST diagonalN)) ∈ A⇧k →⇩a pure (nat_rel ×⇩r nat_rel → the_pure A)"
using assms
apply sepref_to_hoare
apply (sep_auto simp: diagonalN_def is_pure_conv IS_PURE_def PRES_ZERO_UNIQUE_def )
done
text ‹In order to discharge preconditions, we need to prove some auxiliary lemma
that non-zero indexes are within range›
lemma diagonal_nonzero_ltN[simp]: "(a,b)∈mtx_nonzero (diagonalN k) ⟹ a<N ∧ b<N"
by (auto simp: mtx_nonzero_def diagonalN_def split: if_split_asm)
private definition "init_test2 ≡ do {
ASSERT (N>2);
let m = op_mtx_new (diagonalN (1::int));
RETURN (m(1,2))
}"
private sepref_definition init_test2_impl is "uncurry0 init_test2" :: "unit_assn⇧k→⇩aint_assn"
unfolding init_test2_def amtx_fold_custom_new[of N N]
by sepref
end
export_code scmul_impl in SML_imp
end
hide_const scmul_impl
hide_const(open) is_amtx
end
Theory IICF_Sepl_Binding
section ‹Sepref Bindings for Imp/HOL Collections›
theory IICF_Sepl_Binding
imports
Separation_Logic_Imperative_HOL.Imp_Map_Spec
Separation_Logic_Imperative_HOL.Imp_Set_Spec
Separation_Logic_Imperative_HOL.Imp_List_Spec
Separation_Logic_Imperative_HOL.Hash_Map_Impl
Separation_Logic_Imperative_HOL.Array_Map_Impl
Separation_Logic_Imperative_HOL.To_List_GA
Separation_Logic_Imperative_HOL.Hash_Set_Impl
Separation_Logic_Imperative_HOL.Array_Set_Impl
Separation_Logic_Imperative_HOL.Open_List
Separation_Logic_Imperative_HOL.Circ_List
"../Intf/IICF_Map"
"../Intf/IICF_Set"
"../Intf/IICF_List"
Collections.Locale_Code
begin
text ‹This theory binds collection data structures from the
basic collection framework established in
‹AFP/Separation_Logic_Imperative_HOL› for usage with Sepref.
›
locale imp_map_contains_key = imp_map +
constrains is_map :: "('k ⇀ 'v) ⇒ 'm ⇒ assn"
fixes contains_key :: "'k ⇒ 'm ⇒ bool Heap"
assumes contains_key_rule[sep_heap_rules]:
"<is_map m p> contains_key k p <λr. is_map m p * ↑(r⟷k∈dom m)>⇩t"
locale gen_contains_key_by_lookup = imp_map_lookup
begin
definition "contains_key k m ≡ do {r ← lookup k m; return (¬is_None r)}"
sublocale imp_map_contains_key is_map contains_key
apply unfold_locales
unfolding contains_key_def
apply (sep_auto split: option.splits)
done
end
locale imp_list_tail = imp_list +
constrains is_list :: "'a list ⇒ 'l ⇒ assn"
fixes tail :: "'l ⇒ 'l Heap"
assumes tail_rule[sep_heap_rules]:
"l≠[] ⟹ <is_list l p> tail p <is_list (tl l)>⇩t"
definition os_head :: "'a::heap os_list ⇒ ('a) Heap" where
"os_head p ≡ case p of
None ⇒ raise STR ''os_Head: Empty list''
| Some p ⇒ do { m ←!p; return (val m) }"
primrec os_tl :: "'a::heap os_list ⇒ ('a os_list) Heap" where
"os_tl None = raise STR ''os_tl: Empty list''"
| "os_tl (Some p) = do { m ←!p; return (next m) }"
interpretation os: imp_list_head os_list os_head
by unfold_locales (sep_auto simp: os_head_def neq_Nil_conv)
interpretation os: imp_list_tail os_list os_tl
by unfold_locales (sep_auto simp: os_tl_def neq_Nil_conv)
definition cs_is_empty :: "'a::heap cs_list ⇒ bool Heap" where
"cs_is_empty p ≡ return (is_None p)"
interpretation cs: imp_list_is_empty cs_list cs_is_empty
by unfold_locales (sep_auto simp: cs_is_empty_def split: option.splits)
definition cs_head :: "'a::heap cs_list ⇒ 'a Heap" where
"cs_head p ≡ case p of
None ⇒ raise STR ''cs_head: Empty list''
| Some p ⇒ do { n ← !p; return (val n)}"
interpretation cs: imp_list_head cs_list cs_head
by unfold_locales (sep_auto simp: neq_Nil_conv cs_head_def)
definition cs_tail :: "'a::heap cs_list ⇒ 'a cs_list Heap" where
"cs_tail p ≡ do { (_,r) ← cs_pop p; return r }"
interpretation cs: imp_list_tail cs_list cs_tail
by unfold_locales (sep_auto simp: cs_tail_def)
lemma is_hashmap_finite[simp]: "h ⊨ is_hashmap m mi ⟹ finite (dom m)"
unfolding is_hashmap_def is_hashmap'_def
by auto
lemma is_hashset_finite[simp]: "h ⊨ is_hashset s si ⟹ finite s"
unfolding is_hashset_def
by (auto dest: is_hashmap_finite)
definition "ias_is_it s a si ≡ λ(a',i).
∃⇩Al. a↦⇩al * ↑(a'=a ∧ s=ias_of_list l ∧ (i=length l ∧ si={} ∨ i<length l ∧ i∈s ∧ si=s ∩ {x. x≥i} ))
"
context begin
private function first_memb where
"first_memb lmax a i = do {
if i<lmax then do {
x ← Array.nth a i;
if x then return i else first_memb lmax a (Suc i)
} else
return i
}"
by pat_completeness auto
termination by (relation "measure (λ(l,_,i). l-i)") auto
declare first_memb.simps[simp del]
private lemma first_memb_rl_aux:
assumes "lmax ≤ length l" "i≤lmax"
shows
"< a ↦⇩a l >
first_memb lmax a i
<λk. a↦⇩a l * ↑(k≤lmax ∧ (∀j. i≤j ∧ j<k ⟶ ¬l!j) ∧ i≤k ∧ (k=lmax ∨ l!k)) >"
using assms
proof (induction lmax a i rule: first_memb.induct)
case (1 lmax a i)
show ?case
apply (subst first_memb.simps)
using "1.prems"
apply (sep_auto heap: "1.IH"; ((sep_auto;fail) | metis eq_iff not_less_eq_eq))
done
qed
private lemma first_memb_rl[sep_heap_rules]:
assumes "lmax ≤ length l" "i≤lmax"
shows "< a ↦⇩a l >
first_memb lmax a i
<λk. a↦⇩a l * ↑(ias_of_list l ∩ {i..<k} = {} ∧ i≤k ∧ (k<lmax ∧ k∈ias_of_list l ∨ k=lmax) ) >"
using assms
by (sep_auto simp: ias_of_list_def heap: first_memb_rl_aux)
definition "ias_it_init a = do {
l ← Array.len a;
i ← first_memb l a 0;
return (a,i)
}"
definition "ias_it_has_next ≡ λ(a,i). do {
l ← Array.len a;
return (i<l)
}"
definition "ias_it_next ≡ λ(a,i). do {
l ← Array.len a;
i' ← first_memb l a (Suc i);
return (i,(a,i'))
}"
lemma ias_of_list_bound: "ias_of_list l ⊆ {0..<length l}" by (auto simp: ias_of_list_def)
end
interpretation ias: imp_set_iterate is_ias ias_is_it ias_it_init ias_it_has_next ias_it_next
apply unfold_locales
unfolding is_ias_def ias_is_it_def
unfolding ias_it_init_def using ias_of_list_bound
apply (sep_auto)
unfolding ias_it_next_def using ias_of_list_bound
apply (sep_auto; fastforce)
unfolding ias_it_has_next_def
apply sep_auto
apply sep_auto
done
lemma ias_of_list_finite[simp, intro!]: "finite (ias_of_list l)"
using finite_subset[OF ias_of_list_bound] by auto
lemma is_ias_finite[simp]: "h ⊨ is_ias S x ⟹ finite S"
unfolding is_ias_def by auto
lemma to_list_ga_rec_rule:
assumes "imp_set_iterate is_set is_it it_init it_has_next it_next"
assumes "imp_list_prepend is_list l_prepend"
assumes FIN: "finite it"
assumes DIS: "distinct l" "set l ∩ it = {}"
shows "
< is_it s si it iti * is_list l li >
to_list_ga_rec it_has_next it_next l_prepend iti li
< λr. ∃⇩Al'. is_set s si
* is_list l' r
* ↑(distinct l' ∧ set l' = set l ∪ it) >⇩t"
proof -
interpret imp_set_iterate is_set is_it it_init it_has_next it_next
+ imp_list_prepend is_list l_prepend
by fact+
from FIN DIS show ?thesis
proof (induction arbitrary: l li iti rule: finite_psubset_induct)
case (psubset it)
show ?case
apply (subst to_list_ga_rec.simps)
using psubset.prems apply (sep_auto heap: psubset.IH)
apply (rule ent_frame_fwd[OF quit_iteration])
apply frame_inference
apply solve_entails
done
qed
qed
lemma to_list_ga_rule:
assumes IT: "imp_set_iterate is_set is_it it_init it_has_next it_next"
assumes EM: "imp_list_empty is_list l_empty"
assumes PREP: "imp_list_prepend is_list l_prepend"
assumes FIN: "finite s"
shows "
<is_set s si>
to_list_ga it_init it_has_next it_next
l_empty l_prepend si
<λr. ∃⇩Al. is_set s si * is_list l r * true * ↑(distinct l ∧ set l = s)>"
proof -
interpret imp_list_empty is_list l_empty +
imp_set_iterate is_set is_it it_init it_has_next it_next
by fact+
note [sep_heap_rules] = to_list_ga_rec_rule[OF IT PREP]
show ?thesis
unfolding to_list_ga_def
by (sep_auto simp: FIN)
qed
subsection ‹Binding Locales›
method solve_sepl_binding = (
unfold_locales;
(unfold option_assn_pure_conv)?;
sep_auto
intro!: hfrefI hn_refineI[THEN hn_refine_preI]
simp: invalid_assn_def hn_ctxt_def pure_def
)
subsubsection ‹Map›
locale bind_map = imp_map is_map for is_map :: "('ki ⇀ 'vi) ⇒ 'm ⇒ assn"
begin
definition "assn K V ≡ hr_comp is_map (⟨the_pure K,the_pure V⟩map_rel)"
lemmas [fcomp_norm_unfold] = assn_def[symmetric]
lemmas [safe_constraint_rules] = CN_FALSEI[of is_pure "assn K V" for K V]
end
locale bind_map_empty = imp_map_empty + bind_map
begin
lemma empty_hnr_aux: "(uncurry0 empty,uncurry0 (RETURN op_map_empty)) ∈ unit_assn⇧k →⇩a is_map"
by solve_sepl_binding
sepref_decl_impl (no_register) empty: empty_hnr_aux .
end
locale bind_map_is_empty = imp_map_is_empty + bind_map
begin
lemma is_empty_hnr_aux: "(is_empty,RETURN o op_map_is_empty) ∈ is_map⇧k →⇩a bool_assn"
by solve_sepl_binding
sepref_decl_impl is_empty: is_empty_hnr_aux .
end
locale bind_map_update = imp_map_update + bind_map
begin
lemma update_hnr_aux: "(uncurry2 update,uncurry2 (RETURN ooo op_map_update)) ∈ id_assn⇧k *⇩a id_assn⇧k *⇩a is_map⇧d →⇩a is_map"
by solve_sepl_binding
sepref_decl_impl update: update_hnr_aux .
end
locale bind_map_delete = imp_map_delete + bind_map
begin
lemma delete_hnr_aux: "(uncurry delete,uncurry (RETURN oo op_map_delete)) ∈ id_assn⇧k *⇩a is_map⇧d →⇩a is_map"
by solve_sepl_binding
sepref_decl_impl delete: delete_hnr_aux .
end
locale bind_map_lookup = imp_map_lookup + bind_map
begin
lemma lookup_hnr_aux: "(uncurry lookup,uncurry (RETURN oo op_map_lookup)) ∈ id_assn⇧k *⇩a is_map⇧k →⇩a id_assn"
by solve_sepl_binding
sepref_decl_impl lookup: lookup_hnr_aux .
end
locale bind_map_contains_key = imp_map_contains_key + bind_map
begin
lemma contains_key_hnr_aux: "(uncurry contains_key,uncurry (RETURN oo op_map_contains_key)) ∈ id_assn⇧k *⇩a is_map⇧k →⇩a bool_assn"
by solve_sepl_binding
sepref_decl_impl contains_key: contains_key_hnr_aux .
end
subsubsection ‹Set›
locale bind_set = imp_set is_set for is_set :: "('ai set) ⇒ 'm ⇒ assn" +
fixes A :: "'a ⇒ 'ai ⇒ assn"
begin
definition "assn ≡ hr_comp is_set (⟨the_pure A⟩set_rel)"
lemmas [safe_constraint_rules] = CN_FALSEI[of is_pure "assn"]
end
locale bind_set_setup = bind_set
begin
lemmas [fcomp_norm_unfold] = assn_def[symmetric]
lemma APA: "⟦PROP Q; CONSTRAINT is_pure A⟧ ⟹ PROP Q" .
lemma APAlu: "⟦PROP Q; CONSTRAINT (IS_PURE IS_LEFT_UNIQUE) A⟧ ⟹ PROP Q" .
lemma APAru: "⟦PROP Q; CONSTRAINT (IS_PURE IS_RIGHT_UNIQUE) A⟧ ⟹ PROP Q" .
lemma APAbu: "⟦PROP Q; CONSTRAINT (IS_PURE IS_LEFT_UNIQUE) A; CONSTRAINT (IS_PURE IS_RIGHT_UNIQUE) A⟧ ⟹ PROP Q" .
end
locale bind_set_empty = imp_set_empty + bind_set
begin
lemma hnr_empty_aux: "(uncurry0 empty,uncurry0 (RETURN op_set_empty))∈unit_assn⇧k →⇩a is_set"
by solve_sepl_binding
interpretation bind_set_setup by standard
lemmas hnr_op_empty = hnr_empty_aux[FCOMP op_set_empty.fref[where A="the_pure A"]]
lemmas hnr_mop_empty = hnr_op_empty[FCOMP mk_mop_rl0_np[OF mop_set_empty_alt]]
end
locale bind_set_is_empty = imp_set_is_empty + bind_set
begin
lemma hnr_is_empty_aux: "(is_empty, RETURN o op_set_is_empty)∈is_set⇧k →⇩a bool_assn"
by solve_sepl_binding
interpretation bind_set_setup by standard
lemmas hnr_op_is_empty[sepref_fr_rules] = hnr_is_empty_aux[THEN APA,FCOMP op_set_is_empty.fref[where A="the_pure A"]]
lemmas hnr_mop_is_empty[sepref_fr_rules] = hnr_op_is_empty[FCOMP mk_mop_rl1_np[OF mop_set_is_empty_alt]]
end
locale bind_set_member = imp_set_memb + bind_set
begin
lemma hnr_member_aux: "(uncurry memb, uncurry (RETURN oo op_set_member))∈id_assn⇧k *⇩a is_set⇧k →⇩a bool_assn"
by solve_sepl_binding
interpretation bind_set_setup by standard
lemmas hnr_op_member[sepref_fr_rules] = hnr_member_aux[THEN APAbu,FCOMP op_set_member.fref[where A="the_pure A"]]
lemmas hnr_mop_member[sepref_fr_rules] = hnr_op_member[FCOMP mk_mop_rl2_np[OF mop_set_member_alt]]
end
locale bind_set_insert = imp_set_ins + bind_set
begin
lemma hnr_insert_aux: "(uncurry ins, uncurry (RETURN oo op_set_insert))∈id_assn⇧k *⇩a is_set⇧d →⇩a is_set"
by solve_sepl_binding
interpretation bind_set_setup by standard
lemmas hnr_op_insert[sepref_fr_rules] = hnr_insert_aux[THEN APAru,FCOMP op_set_insert.fref[where A="the_pure A"]]
lemmas hnr_mop_insert[sepref_fr_rules] = hnr_op_insert[FCOMP mk_mop_rl2_np[OF mop_set_insert_alt]]
end
locale bind_set_delete = imp_set_delete + bind_set
begin
lemma hnr_delete_aux: "(uncurry delete, uncurry (RETURN oo op_set_delete))∈id_assn⇧k *⇩a is_set⇧d →⇩a is_set"
by solve_sepl_binding
interpretation bind_set_setup by standard
lemmas hnr_op_delete[sepref_fr_rules] = hnr_delete_aux[THEN APAbu,FCOMP op_set_delete.fref[where A="the_pure A"]]
lemmas hnr_mop_delete[sepref_fr_rules] = hnr_op_delete[FCOMP mk_mop_rl2_np[OF mop_set_delete_alt]]
end
primrec sorted_wrt' where
"sorted_wrt' R [] ⟷ True"
| "sorted_wrt' R (x#xs) ⟷ list_all (R x) xs ∧ sorted_wrt' R xs"
lemma sorted_wrt'_eq: "sorted_wrt' = sorted_wrt"
proof (intro ext iffI)
fix R :: "'a ⇒ 'a ⇒ bool" and xs :: "'a list"
{
assume "sorted_wrt R xs"
thus "sorted_wrt' R xs"
by (induction xs)(auto simp: list_all_iff sorted_sorted_wrt[symmetric])
}
{
assume "sorted_wrt' R xs"
thus "sorted_wrt R xs"
by (induction xs) (auto simp: list_all_iff)
}
qed
lemma param_sorted_wrt[param]: "(sorted_wrt, sorted_wrt) ∈ (A → A → bool_rel) → ⟨A⟩list_rel → bool_rel"
unfolding sorted_wrt'_eq[symmetric] sorted_wrt'_def
by parametricity
lemma obtain_list_from_setrel:
assumes SV: "single_valued A"
assumes "(set l,s) ∈ ⟨A⟩set_rel"
obtains m where "s=set m" "(l,m)∈⟨A⟩list_rel"
using assms(2)
proof (induction l arbitrary: s thesis)
case Nil
show ?case
apply (rule Nil(1)[where m="[]"])
using Nil(2)
by auto
next
case (Cons x l)
obtain s' y where "s=insert y s'" "(x,y)∈A" "(set l,s')∈⟨A⟩set_rel"
proof -
from Cons.prems(2) obtain y where X0: "y∈s" "(x,y)∈A"
unfolding set_rel_def by auto
from Cons.prems(2) have
X1: "∀a∈set l. ∃b∈s. (a,b)∈A" and
X2: "∀b∈s. ∃a∈insert x (set l). (a,b)∈A"
unfolding set_rel_def by auto
show ?thesis proof (cases "∃a∈set l. (a,y)∈A")
case True
show ?thesis
apply (rule that[of y s])
subgoal using X0 by auto
subgoal by fact
subgoal
apply (rule set_relI)
subgoal using X1 by blast
subgoal by (metis IS_RIGHT_UNIQUED SV True X0(2) X2 insert_iff)
done
done
next
case False
show ?thesis
apply (rule that[of y "s-{y}"])
subgoal using X0 by auto
subgoal by fact
subgoal
apply (rule set_relI)
subgoal using False X1 by fastforce
subgoal using IS_RIGHT_UNIQUED SV X0(2) X2 by fastforce
done
done
qed
qed
moreover from Cons.IH[OF _ ‹(set l,s')∈⟨A⟩set_rel›] obtain m where "s'=set m" "(l,m)∈⟨A⟩list_rel" .
ultimately show thesis
apply -
apply (rule Cons.prems(1)[of "y#m"])
by auto
qed
lemma param_it_to_sorted_list[param]: "⟦IS_LEFT_UNIQUE A; IS_RIGHT_UNIQUE A⟧ ⟹ (it_to_sorted_list, it_to_sorted_list) ∈ (A → A → bool_rel) → ⟨A⟩set_rel → ⟨⟨A⟩list_rel⟩nres_rel"
unfolding it_to_sorted_list_def[abs_def]
apply (auto simp: it_to_sorted_list_def pw_nres_rel_iff refine_pw_simps)
apply (rule obtain_list_from_setrel; assumption?; clarsimp)
apply (intro exI conjI; assumption?)
using param_distinct[param_fo] apply blast
apply simp
using param_sorted_wrt[param_fo] apply blast
done
locale bind_set_iterate = imp_set_iterate + bind_set +
assumes is_set_finite: "h ⊨ is_set S x ⟹ finite S"
begin
context begin
private lemma is_imp_set_iterate: "imp_set_iterate is_set is_it it_init it_has_next it_next" by unfold_locales
private lemma is_imp_list_empty: "imp_list_empty (list_assn id_assn) (return [])"
apply unfold_locales
apply solve_constraint
apply sep_auto
done
private lemma is_imp_list_prepend: "imp_list_prepend (list_assn id_assn) (return oo List.Cons)"
apply unfold_locales
apply solve_constraint
apply (sep_auto simp: pure_def)
done
definition "to_list ≡ to_list_ga it_init it_has_next it_next (return []) (return oo List.Cons)"
private lemmas tl_rl = to_list_ga_rule[OF is_imp_set_iterate is_imp_list_empty is_imp_list_prepend, folded to_list_def]
private lemma to_list_sorted1: "(to_list,PR_CONST (it_to_sorted_list (λ_ _. True))) ∈ is_set⇧k →⇩a list_assn id_assn"
unfolding PR_CONST_def
apply (intro hfrefI)
apply (rule hn_refine_preI)
apply (rule hn_refineI)
unfolding it_to_sorted_list_def
apply (sep_auto intro: hfrefI hn_refineI intro: is_set_finite heap: tl_rl)
done
private lemma to_list_sorted2: "⟦
CONSTRAINT (IS_PURE IS_LEFT_UNIQUE) A;
CONSTRAINT (IS_PURE IS_RIGHT_UNIQUE) A⟧ ⟹
(PR_CONST (it_to_sorted_list (λ_ _. True)), PR_CONST (it_to_sorted_list (λ_ _. True))) ∈ ⟨the_pure A⟩set_rel → ⟨⟨the_pure A⟩list_rel⟩nres_rel"
unfolding PR_CONST_def CONSTRAINT_def IS_PURE_def
by clarify parametricity
lemmas to_list_hnr = to_list_sorted1[FCOMP to_list_sorted2, folded assn_def]
lemmas to_list_is_to_sorted_list = IS_TO_SORTED_LISTI[OF to_list_hnr]
lemma to_list_gen[sepref_gen_algo_rules]: "⟦CONSTRAINT (IS_PURE IS_LEFT_UNIQUE) A; CONSTRAINT (IS_PURE IS_RIGHT_UNIQUE) A⟧
⟹ GEN_ALGO to_list (IS_TO_SORTED_LIST (λ_ _. True) (bind_set.assn is_set A) A)"
by (simp add: GEN_ALGO_def to_list_is_to_sorted_list)
end
end
subsubsection ‹List›
locale bind_list = imp_list is_list for is_list :: "('ai list) ⇒ 'm ⇒ assn" +
fixes A :: "'a ⇒ 'ai ⇒ assn"
begin
definition "assn ≡ hr_comp is_list (⟨the_pure A⟩list_rel)"
lemmas [safe_constraint_rules] = CN_FALSEI[of is_pure "assn"]
end
locale bind_list_empty = imp_list_empty + bind_list
begin
lemma hnr_aux: "(uncurry0 empty,uncurry0 (RETURN op_list_empty))∈(pure unit_rel)⇧k →⇩a is_list"
apply rule apply rule apply (sep_auto simp: pure_def) done
lemmas hnr
= hnr_aux[FCOMP op_list_empty.fref[of "the_pure A"], folded assn_def]
lemmas hnr_mop = hnr[FCOMP mk_mop_rl0_np[OF mop_list_empty_alt]]
end
locale bind_list_is_empty = imp_list_is_empty + bind_list
begin
lemma hnr_aux: "(is_empty,RETURN o op_list_is_empty)∈(is_list)⇧k →⇩a pure bool_rel"
apply rule apply rule apply (sep_auto simp: pure_def) done
lemmas hnr[sepref_fr_rules]
= hnr_aux[FCOMP op_list_is_empty.fref, of "the_pure A", folded assn_def]
lemmas hnr_mop[sepref_fr_rules] = hnr[FCOMP mk_mop_rl1_np[OF mop_list_is_empty_alt]]
end
locale bind_list_append = imp_list_append + bind_list
begin
lemma hnr_aux: "(uncurry (swap_args2 append),uncurry (RETURN oo op_list_append))
∈(is_list)⇧d *⇩a (pure Id)⇧k →⇩a is_list" by solve_sepl_binding
lemmas hnr[sepref_fr_rules]
= hnr_aux[FCOMP op_list_append.fref,of A, folded assn_def]
lemmas hnr_mop[sepref_fr_rules] = hnr[FCOMP mk_mop_rl2_np[OF mop_list_append_alt]]
end
locale bind_list_prepend = imp_list_prepend + bind_list
begin
lemma hnr_aux: "(uncurry prepend,uncurry (RETURN oo op_list_prepend))
∈(pure Id)⇧k *⇩a (is_list)⇧d →⇩a is_list" by solve_sepl_binding
lemmas hnr[sepref_fr_rules]
= hnr_aux[FCOMP op_list_prepend.fref,of A, folded assn_def]
lemmas hnr_mop[sepref_fr_rules] = hnr[FCOMP mk_mop_rl2_np[OF mop_list_prepend_alt]]
end
locale bind_list_hd = imp_list_head + bind_list
begin
lemma hnr_aux: "(head,RETURN o op_list_hd)
∈[λl. l≠[]]⇩a (is_list)⇧d → pure Id" by solve_sepl_binding
lemmas hnr[sepref_fr_rules] = hnr_aux[FCOMP op_list_hd.fref,of A, folded assn_def]
lemmas hnr_mop[sepref_fr_rules] = hnr[FCOMP mk_mop_rl1[OF mop_list_hd_alt]]
end
locale bind_list_tl = imp_list_tail + bind_list
begin
lemma hnr_aux: "(tail,RETURN o op_list_tl)
∈[λl. l≠[]]⇩a (is_list)⇧d → is_list"
by solve_sepl_binding
lemmas hnr[sepref_fr_rules] = hnr_aux[FCOMP op_list_tl.fref,of "the_pure A", folded assn_def]
lemmas hnr_mop[sepref_fr_rules] = hnr[FCOMP mk_mop_rl1[OF mop_list_tl_alt]]
end
locale bind_list_rotate1 = imp_list_rotate + bind_list
begin
lemma hnr_aux: "(rotate,RETURN o op_list_rotate1)
∈(is_list)⇧d →⇩a is_list"
by solve_sepl_binding
lemmas hnr[sepref_fr_rules] = hnr_aux[FCOMP op_list_rotate1.fref,of "the_pure A", folded assn_def]
lemmas hnr_mop[sepref_fr_rules] = hnr[FCOMP mk_mop_rl1_np[OF mop_list_rotate1_alt]]
end
locale bind_list_rev = imp_list_reverse + bind_list
begin
lemma hnr_aux: "(reverse,RETURN o op_list_rev)
∈(is_list)⇧d →⇩a is_list"
by solve_sepl_binding
lemmas hnr[sepref_fr_rules] = hnr_aux[FCOMP op_list_rev.fref,of "the_pure A", folded assn_def]
lemmas hnr_mop[sepref_fr_rules] = hnr[FCOMP mk_mop_rl1_np[OF mop_list_rev_alt]]
end
subsection ‹Array Map (iam)›
definition "op_iam_empty ≡ IICF_Map.op_map_empty"
interpretation iam: bind_map_empty is_iam iam_new
by unfold_locales
interpretation iam: map_custom_empty op_iam_empty
by unfold_locales (simp add: op_iam_empty_def)
lemmas [sepref_fr_rules] = iam.empty_hnr[folded op_iam_empty_def]
definition [simp]: "op_iam_empty_sz (N::nat) ≡ IICF_Map.op_map_empty"
lemma [def_pat_rules]: "op_iam_empty_sz$N ≡ UNPROTECT (op_iam_empty_sz N)"
by simp
interpretation iam_sz: map_custom_empty "PR_CONST (op_iam_empty_sz N)"
apply unfold_locales
apply (simp)
done
lemma [sepref_fr_rules]: "(uncurry0 iam_new, uncurry0 (RETURN (PR_CONST (op_iam_empty_sz N)))) ∈ unit_assn⇧k →⇩a iam.assn K V"
using iam.empty_hnr[of K V] by simp
interpretation iam: bind_map_update is_iam Array_Map_Impl.iam_update
by unfold_locales
interpretation iam: bind_map_delete is_iam Array_Map_Impl.iam_delete
by unfold_locales
interpretation iam: bind_map_lookup is_iam Array_Map_Impl.iam_lookup
by unfold_locales
setup Locale_Code.open_block
interpretation iam: gen_contains_key_by_lookup is_iam Array_Map_Impl.iam_lookup
by unfold_locales
setup Locale_Code.close_block
interpretation iam: bind_map_contains_key is_iam iam.contains_key
by unfold_locales
subsection ‹Array Set (ias)›
definition [simp]: "op_ias_empty ≡ op_set_empty"
interpretation ias: bind_set_empty is_ias ias_new for A
by unfold_locales
interpretation ias: set_custom_empty ias_new op_ias_empty
by unfold_locales simp
lemmas [sepref_fr_rules] = ias.hnr_op_empty[folded op_ias_empty_def]
definition [simp]: "op_ias_empty_sz (N::nat) ≡ op_set_empty"
lemma [def_pat_rules]: "op_ias_empty_sz$N ≡ UNPROTECT (op_ias_empty_sz N)"
by simp
interpretation ias_sz: bind_set_empty is_ias "ias_new_sz N" for N A
by unfold_locales
interpretation ias_sz: set_custom_empty "ias_new_sz N" "PR_CONST (op_ias_empty_sz N)" for A
by unfold_locales simp
lemma [sepref_fr_rules]:
"(uncurry0 (ias_new_sz N), uncurry0 (RETURN (PR_CONST (op_ias_empty_sz N)))) ∈ unit_assn⇧k →⇩a ias.assn A"
using ias_sz.hnr_op_empty[of N A] by simp
interpretation ias: bind_set_member is_ias Array_Set_Impl.ias_memb for A
by unfold_locales
interpretation ias: bind_set_insert is_ias Array_Set_Impl.ias_ins for A
by unfold_locales
interpretation ias: bind_set_delete is_ias Array_Set_Impl.ias_delete for A
by unfold_locales
setup Locale_Code.open_block
interpretation ias: bind_set_iterate is_ias ias_is_it ias_it_init ias_it_has_next ias_it_next for A
by unfold_locales auto
setup Locale_Code.close_block
subsection ‹Hash Map (hm)›
interpretation hm: bind_map_empty is_hashmap hm_new
by unfold_locales
definition "op_hm_empty ≡ IICF_Map.op_map_empty"
interpretation hm: map_custom_empty op_hm_empty
by unfold_locales (simp add: op_hm_empty_def)
lemmas [sepref_fr_rules] = hm.empty_hnr[folded op_hm_empty_def]
interpretation hm: bind_map_is_empty is_hashmap Hash_Map.hm_isEmpty
by unfold_locales
interpretation hm: bind_map_update is_hashmap Hash_Map.hm_update
by unfold_locales
interpretation hm: bind_map_delete is_hashmap Hash_Map.hm_delete
by unfold_locales
interpretation hm: bind_map_lookup is_hashmap Hash_Map.hm_lookup
by unfold_locales
setup Locale_Code.open_block
interpretation hm: gen_contains_key_by_lookup is_hashmap Hash_Map.hm_lookup
by unfold_locales
setup Locale_Code.close_block
interpretation hm: bind_map_contains_key is_hashmap hm.contains_key
by unfold_locales
subsection ‹Hash Set (hs)›
interpretation hs: bind_set_empty is_hashset hs_new for A
by unfold_locales
definition "op_hs_empty ≡ IICF_Set.op_set_empty"
interpretation hs: set_custom_empty hs_new op_hs_empty for A
by unfold_locales (simp add: op_hs_empty_def)
lemmas [sepref_fr_rules] = hs.hnr_op_empty[folded op_hs_empty_def]
interpretation hs: bind_set_is_empty is_hashset Hash_Set_Impl.hs_isEmpty for A
by unfold_locales
interpretation hs: bind_set_member is_hashset Hash_Set_Impl.hs_memb for A
by unfold_locales
interpretation hs: bind_set_insert is_hashset Hash_Set_Impl.hs_ins for A
by unfold_locales
interpretation hs: bind_set_delete is_hashset Hash_Set_Impl.hs_delete for A
by unfold_locales
setup Locale_Code.open_block
interpretation hs: bind_set_iterate is_hashset hs_is_it hs_it_init hs_it_has_next hs_it_next for A
by unfold_locales simp
setup Locale_Code.close_block
subsection ‹Open Singly Linked List (osll)›
interpretation osll: bind_list os_list for A by unfold_locales
interpretation osll_empty: bind_list_empty os_list os_empty for A
by unfold_locales
definition "osll_empty ≡ op_list_empty"
interpretation osll: list_custom_empty "osll.assn A" os_empty osll_empty
apply unfold_locales
apply (rule osll_empty.hnr)
by (simp add: osll_empty_def)
interpretation osll_is_empty: bind_list_is_empty os_list os_is_empty for A
by unfold_locales
interpretation osll_prepend: bind_list_prepend os_list os_prepend for A
by unfold_locales
interpretation osll_hd: bind_list_hd os_list os_head for A
by unfold_locales
interpretation osll_tl: bind_list_tl os_list os_tl for A
by unfold_locales
interpretation osll_rev: bind_list_rev os_list os_reverse for A
by unfold_locales
subsection ‹Circular Singly Linked List (csll)›
interpretation csll: bind_list cs_list for A by unfold_locales
interpretation csll_empty: bind_list_empty cs_list cs_empty for A
by unfold_locales
definition "csll_empty ≡ op_list_empty"
interpretation csll: list_custom_empty "csll.assn A" cs_empty csll_empty
apply unfold_locales
apply (rule csll_empty.hnr)
by (simp add: csll_empty_def)
interpretation csll_is_empty: bind_list_is_empty cs_list cs_is_empty for A
by unfold_locales
interpretation csll_prepend: bind_list_prepend cs_list cs_prepend for A
by unfold_locales
interpretation csll_append: bind_list_append cs_list cs_append for A
by unfold_locales
interpretation csll_hd: bind_list_hd cs_list cs_head for A
by unfold_locales
interpretation csll_tl: bind_list_tl cs_list cs_tail for A
by unfold_locales
interpretation csll_rotate1: bind_list_rotate1 cs_list cs_rotate for A
by unfold_locales
schematic_goal "hn_refine (emp) (?c::?'c Heap) ?Γ' ?R (do {
x ← mop_list_empty;
RETURN (1 ∈ dom [1::nat ↦ True, 2↦False], {1,2::nat}, 1#(2::nat)#x)
})"
apply (subst iam_sz.fold_custom_empty[where N=10])
apply (subst hs.fold_custom_empty)
apply (subst osll.fold_custom_empty)
by sepref
end
Theory Sepref_Chapter_Userguides
chapter ‹User Guides›
text ‹This chapter contains the available user guides.›
theory Sepref_Chapter_Userguides
imports Main
begin
end
Theory Sepref_Guide_Quickstart
section ‹Quickstart Guide›
theory Sepref_Guide_Quickstart
imports "../IICF/IICF"
begin
subsection ‹Introduction›
text ‹
Sepref is an Isabelle/HOL tool to semi-automatically synthesize
imperative code from abstract specifications.
The synthesis works by replacing operations on abstract data
by operations on concrete data, leaving the structure of the program
(mostly) unchanged. Speref proves a refinement theorem, stating the
relation between the abstract and generated concrete specification.
The concrete specification can then be converted to executable code using
the Isabelle/HOL code generator.
This quickstart guide is best appreciated in the Isabelle IDE (currently Isabelle/jedit),
such that you can use cross-referencing and see intermediate proof states.
›
subsubsection ‹Prerequisites›
text ‹
Sepref is a tool for experienced Isabelle/HOL users. So, this
quickstart guide assumes some familiarity with Isabelel/HOL, and will not
explain standard Isabelle/HOL techniques.
Sepref is based on Imperative/HOL (@{theory "HOL-Imperative_HOL.Imperative_HOL"}) and the Isabelle Refinement Framework (@{theory Refine_Monadic.Refine_Monadic}).
It makes extensive use of the Separation logic formalization for Imperative/HOL (@{theory Separation_Logic_Imperative_HOL.Sep_Main}).
For a thorough introduction to these tools, we refer to their documentation.
However, we try to explain their most basic features when we use them.
›
subsection ‹First Example›
text ‹As a first example, let's compute a minimum value in a non-empty list,
wrt.~ some linear order.
We start by specifying the problem:
›
definition min_of_list :: "'a::linorder list ⇒ 'a nres" where
"min_of_list l ≡ ASSERT (l≠[]) ⪢ SPEC (λx. ∀y∈set l. x≤y)"
text ‹This specification asserts the precondition and then specifies
the valid results ‹x›. The ‹⪢› operator is a bind-operator on monads.
Note that the Isabelle Refinement Framework works with a set/exception monad
over the type @{typ "_ nres"}, where @{term FAIL} is the exception,
and @{term "RES X"} specifies a set @{term X} of possible results.
@{term SPEC} is just the predicate-version of @{term RES}
(actually @{term "SPEC Φ"} is a syntax abbreviation for @{term "RES (Collect Φ)"}).
Thus, @{term min_of_list} will fail if the list is empty, and otherwise
nondeterministically return one of the minimal elements.
›
subsubsection ‹Abstract Algorithm›
text ‹
Next, we develop an abstract algorithm for the problem.
A natural choice for a functional programmer is folding over the list,
initializing the fold with the first element.
›
definition min_of_list1 :: "'a::linorder list ⇒ 'a nres"
where "min_of_list1 l ≡ ASSERT (l≠[]) ⪢ RETURN (fold min (tl l) (hd l))"
text ‹Note that @{term RETURN} returns exactly one (deterministic) result. ›
text ‹We have to show that our implementation actually refines the specification›
lemma min_of_list1_refine: "(min_of_list1,min_of_list) ∈ Id → ⟨Id⟩nres_rel"
text ‹This lemma has to be read as follows: If the argument given to
@{const min_of_list1} and @{const min_of_list} are related
by @{const Id} (i.e.\ are identical), then the result of @{const min_of_list1} is
a refinement of the result of @{const min_of_list}, wrt.\ relation @{const Id}.
For an explanation, lets simplify the statement first:
›
apply (clarsimp intro!: nres_relI)
text ‹The @{typ "_ nres"} type defines the refinement ordering, which is a lifted subset ordering,
with @{term FAIL} being the greatest element. This means, that we can assume a
non-empty list during the refinement proof
(otherwise, the RHS will be @{term FAIL}, and the statement becomes trivial)
The Isabelle Refinement Framework provides various techniques to extract verification
conditions from given goals, we use the standard VCG here:
›
unfolding min_of_list_def min_of_list1_def
apply (refine_vcg)
text ‹The VCG leaves us with a standard HOL goal, which is easily provable›
by (auto simp: neq_Nil_conv Min.set_eq_fold[symmetric])
text ‹A more concise proof of the same lemma omits the initial simplification,
which we only inserted to explain the refinement ordering: ›
lemma "(min_of_list1,min_of_list) ∈ Id → ⟨Id⟩nres_rel"
unfolding min_of_list_def[abs_def] min_of_list1_def[abs_def]
apply (refine_vcg)
by (auto simp: neq_Nil_conv Min.set_eq_fold[symmetric])
subsubsection ‹Refined Abstract Algorithm›
text ‹Now, we have a nice functional implementation.
However, we are interested in an imperative implementation.
Ultimately, we want to implement the list by an array.
Thus, we replace folding over the list by indexing into the list,
and also add an index-shift to get rid of the @{term hd} and @{term tl}.
›
definition min_of_list2 :: "'a::linorder list ⇒ 'a nres"
where "min_of_list2 l ≡ ASSERT (l≠[]) ⪢ RETURN (fold (λi. min (l!(i+1))) [0..<length l - 1] (l!0))"
text ‹Proving refinement is straightforward, using the @{thm [source] fold_idx_conv} lemma.›
lemma min_of_list2_refine: "(min_of_list2, min_of_list1)∈Id → ⟨Id⟩nres_rel"
unfolding min_of_list2_def[abs_def] min_of_list1_def[abs_def]
apply refine_vcg
apply clarsimp_all
apply (rewrite in "_=⌑" fold_idx_conv)
by (auto simp: nth_tl hd_conv_nth)
subsubsection ‹Imperative Algorithm›
text ‹The version @{const min_of_list2} already looks like the desired imperative version,
only that we have lists instead of arrays, and would like to replace the folding over
@{term "[0..<length l -1]"} by a for-loop.
This is exactly what the Sepref-tool does. The following command synthesizes
an imperative version ‹min_of_list3› of the algorithm for natural numbers,
which uses an array instead of a list:
›
sepref_definition min_of_list3 is min_of_list2 :: "(array_assn nat_assn)⇧k →⇩a nat_assn"
unfolding min_of_list2_def[abs_def]
by sepref
text ‹The generated constant represents an Imperative/HOL program, and
is executable: ›
thm min_of_list3_def
export_code min_of_list3 checking SML_imp
text ‹Also note that the Sepref tool applied a deforestation optimization:
It recognizes a fold over @{term "[0..<n]"}, and implements it by the
tail-recursive function @{const "imp_for'"}, which uses a counter instead of
an intermediate list.
There are a couple of optimizations, which come in the form of two sets of
simplifier rules, which are applied one after the other:
›
thm sepref_opt_simps
thm sepref_opt_simps2
text ‹They are just named theorem collections, e.g., ‹sepref_opt_simps add/del›
can be used to modify them.›
text ‹Moreover, a refinement theorem is generated, which states the correspondence between
@{const min_of_list3} and @{const min_of_list2}: ›
thm min_of_list3.refine
text ‹It states the relations between the parameter and the result of
the concrete and abstract function. The parameter is related by
@{term "array_assn nat_assn"}. Here, @{term "array_assn A"} relates arrays
with lists, such that the elements are related @{term A} --- in our case by
‹nat_assn›, which relates natural numbers to themselves.
We also say that we @{emph ‹implement›} lists of nats by arrays of nats.
The result is also implemented by natural numbers.
Moreover, the parameters may be stored on the heap, and we have to indicate whether
the function keeps them intact or not. Here, we use the annotation ‹_⇧k› (for @{emph ‹keep›}) to indicate
that the parameter is kept intact, and ‹_⇧d› (for @{emph ‹destroy›}) to indicate that it is destroyed.
›
subsubsection ‹Overall Correctness Statement›
text ‹Finally, we can use transitivity of refinement to link our implementation to
the specification. The @{attribute FCOMP} attribute is able to compose refinement
theorems:›
theorem min_of_list3_correct: "(min_of_list3,min_of_list) ∈ (array_assn nat_assn)⇧k →⇩a nat_assn"
using min_of_list3.refine[FCOMP min_of_list2_refine, FCOMP min_of_list1_refine] .
text ‹While the above statement is suited to re-use the algorithm within the sepref-framework,
a more low-level correctness theorem can be stated using separation logic.
This has the advantage that understanding the statement depends on less
definitional overhead:›
lemma "l≠[] ⟹ <array_assn nat_assn l a> min_of_list3 a <λx. array_assn nat_assn l a * ↑(∀y∈set l. x≤y)>⇩t"
text ‹The proof of this theorem has to unfold the several layers of the Sepref framework,
down to the separation logic layer. An explanation of these layers is out of scope of this
quickstart guide, we just present some proof techniques that often work. In the best case,
the fully automatic proof will work:›
by (sep_auto
simp: min_of_list_def pure_def pw_le_iff refine_pw_simps
heap: min_of_list3_correct[THEN hfrefD, of l a, THEN hn_refineD, simplified])
text ‹If the automatic method does not work, here is a more explicit proof,
that can be adapted for proving similar statements:›
lemma "l≠[] ⟹ <array_assn nat_assn l a> min_of_list3 a <λx. array_assn nat_assn l a * ↑(∀y∈set l. x≤y)>⇩t"
proof -
text ‹We inlined the definition of @{const min_of_list}.
This will yield two proof obligations later, which we discharge as auxiliary lemmas here
›
assume [simp]: "l≠[]"
have [simp]: "nofail (min_of_list l)"
by (auto simp: min_of_list_def refine_pw_simps)
have 1: "⋀x. RETURN x ≤ min_of_list l ⟹ ∀y∈set l. x≤y"
by (auto simp: min_of_list_def pw_le_iff refine_pw_simps)
note rl = min_of_list3_correct[THEN hfrefD, of l a, THEN hn_refineD, simplified]
text ‹This should yield a Hoare-triple for @{term "min_of_list3 a"},
which can now be used to prove the desired statement via a consequence rule›
show ?thesis
apply (rule cons_rule[OF _ _ rl])
text ‹The preconditions should match, however, @{method sep_auto} is also able to discharge
more complicated implications here. Be sure to simplify with @{thm [source] pure_def},
if you have parameters that are not stored on the heap (in our case, we don't, but include the
simplification anyway.)›
apply (sep_auto simp: pure_def)
text ‹The heap-parts of the postcondition should also match.
The pure parts require the auxiliary statements that we proved above.›
apply (sep_auto simp: pure_def dest!: 1)
done
qed
subsubsection ‹Using the Algorithm›
text ‹As an example, we now want to use our algorithm to compute the minimum value
of some concrete list. In order to use an algorithm, we have to declare both,
it's abstract version and its implementation to the Sepref tool.
›
sepref_register min_of_list
declare min_of_list3_correct[sepref_fr_rules]
text ‹Now we can define the abstract version of our example algorithm.
We compute the minimum value of pseudo-random lists of a given length
›
primrec rand_list_aux :: "nat ⇒ nat ⇒ nat list" where
"rand_list_aux s 0 = []"
| "rand_list_aux s (Suc n) = (let s = (1664525 * s + 1013904223) mod 2^32 in s # rand_list_aux s n)"
definition "rand_list ≡ rand_list_aux 42"
definition "min_of_rand_list n = min_of_list (rand_list n)"
text ‹And use Sepref to synthesize a concrete version›
text ‹We use a feature of Sepref to combine imperative and purely functional code,
and leave the generation of the list purely functional, then copy it into an array,
and invoke our algorithm. We have to declare the @{const rand_list} operation:›
sepref_register rand_list
lemma [sepref_import_param]: "(rand_list,rand_list)∈nat_rel → ⟨nat_rel⟩list_rel" by auto
text ‹Here, we use a feature of Sepref to import parametricity theorems.
Note that the parametricity theorem we provide here is trivial, as
@{const nat_rel} is identity, and @{const list_rel} as well as @{term "(→)"}
preserve identity.
However, we have to specify a parametricity theorem that reflects the
structure of the involved types.
›
text ‹Finally, we can invoke Sepref›
sepref_definition min_of_rand_list1 is "min_of_rand_list" :: "nat_assn⇧k →⇩a nat_assn"
unfolding min_of_rand_list_def[abs_def]
text ‹We construct a plain list, however, the implementation of @{const min_of_list}
expects an array. We have to insert a conversion, which is conveniently done
with the @{method rewrite} method:
›
apply (rewrite in "min_of_list ⌑" array_fold_custom_of_list)
by sepref
text ‹In the generated code, we see that the pure @{const rand_list} function
is invoked, its result is converted to an array, which is then passed to
@{const min_of_list3}.
Note that @{command sepref_definition} prints the generated theorems to the
output on the end of the proof. Use the output panel, or hover the mouse over
the by-command to see this output.
›
text ‹The generated algorithm can be exported›
export_code min_of_rand_list1 checking SML OCaml? Haskell? Scala
text ‹and executed›
ML_val ‹@{code min_of_rand_list1} (@{code nat_of_integer} 100) ()›
text ‹Note that Imperative/HOL for ML generates a function from unit,
and applying this function triggers execution.›
subsection ‹Binary Search Example›
text ‹As second example, we consider a simple binary search algorithm.
We specify the abstract problem, i.e., finding an element in a sorted list.
›
definition "in_sorted_list x xs ≡ ASSERT (sorted xs) ⪢ RETURN (x∈set xs)"
text ‹And give a standard iterative implementation:›
definition "in_sorted_list1_invar x xs ≡ λ(l,u,found).
(l≤u ∧ u≤length xs)
∧ (found ⟶ x∈set xs)
∧ (¬found ⟶ (x∉set (take l xs) ∧ x∉set (drop u xs))
)"
definition "in_sorted_list1 x xs ≡ do {
let l=0;
let u=length xs;
(_,_,r) ← WHILEIT (in_sorted_list1_invar x xs)
(λ(l,u,found). l<u ∧ ¬found) (λ(l,u,found). do {
let i = (l+u) div 2;
ASSERT (i<length xs);
let xi = xs!i;
if x=xi then
RETURN (l,u,True)
else if x<xi then
RETURN (l,i,False)
else
RETURN (i+1,u,False)
}) (l,u,False);
RETURN r
}"
text ‹Note that we can refine certain operations only if we can prove that their
preconditions are matched. For example, we can refine list indexing to array
indexing only if we can prove that the index is in range. This proof has to be
done during the synthesis procedure. However, such precondition proofs may be
hard, in particular for automatic methods, and we have to do them anyway when
proving correct our abstract implementation. Thus, it is a good idea to assert
the preconditions in the abstract implementation. This way, they are immediately
available during synthesis (recall, when refining an assertion, you may assume
the asserted predicate @{thm le_ASSERTI}).
An alternative is to use monadic list operations that already assert their precondition.
The advantage is that you cannot forget to assert the precondition, the disadvantage
is that the operation is monadic, and thus, nesting it into other operations is more cumbersome.
In our case, the operation would be @{const mop_list_get}
(Look at it's simplified definition to get an impression what it does).
›
thm mop_list_get_alt
text ‹We first prove the refinement correct›
context begin
private lemma isl1_measure: "wf (measure (λ(l,u,f). u-l + (if f then 0 else 1)))" by simp
private lemma neq_nlt_is_gt:
fixes a b :: "'a::linorder"
shows "a≠b ⟹ ¬(a<b) ⟹ a>b" by simp
private lemma isl1_aux1:
assumes "sorted xs"
assumes "i<length xs"
assumes "xs!i < x"
shows "x∉set (take i xs)"
using assms
by (auto simp: take_set leD sorted_nth_mono)
private lemma isl1_aux2:
assumes "x ∉ set (take n xs)"
shows "x∉set (drop n xs) ⟷ x∉set xs"
apply (rewrite in "_ = ⌑" append_take_drop_id[of n,symmetric])
using assms
by (auto simp del: append_take_drop_id)
lemma in_sorted_list1_refine: "(in_sorted_list1, in_sorted_list)∈Id → Id → ⟨Id⟩nres_rel"
unfolding in_sorted_list1_def[abs_def] in_sorted_list_def[abs_def]
apply (refine_vcg isl1_measure)
apply (vc_solve simp: in_sorted_list1_invar_def isl1_aux1 isl1_aux2 solve: asm_rl)
apply (auto simp: take_set set_drop_conv leD sorted_nth_mono) []
apply (auto simp: take_set leD sorted_nth_mono dest: neq_nlt_is_gt) []
done
end
text ‹First, let's synthesize an implementation where the list elements are natural numbers.
We will discuss later how to generalize the implementation for arbitrary types.
For technical reasons, the Sepref tool works with uncurried functions. That is, every
function has exactly one argument. You can use the @{term uncurry} function,
and we also provide abbreviations @{term uncurry2} up to @{term uncurry5}.
If a function has no parameters, @{term uncurry0} adds a unit parameter.
›
sepref_definition in_sorted_list2 is "uncurry in_sorted_list1" :: "nat_assn⇧k *⇩a (array_assn nat_assn)⇧k →⇩a bool_assn"
unfolding in_sorted_list1_def[abs_def]
by sepref
export_code in_sorted_list2 checking SML
lemmas in_sorted_list2_correct = in_sorted_list2.refine[FCOMP in_sorted_list1_refine]
subsection ‹Basic Troubleshooting›
text ‹
In this section, we will explain how to investigate problems with the Sepref tool.
Most cases where @{method sepref} fails are due to some
missing operations, unsolvable preconditions, or an odd setup.
›
subsubsection ‹Example›
text ‹We start with an example. Recall the binary search algorithm.
This time, we forget to assert the precondition of the indexing operation.
›
definition "in_sorted_list1' x xs ≡ do {
let l=0;
let u=length xs;
(_,_,r) ← WHILEIT (in_sorted_list1_invar x xs)
(λ(l,u,found). l<u ∧ ¬found) (λ(l,u,found). do {
let i = (l+u) div 2;
let xi = xs!i;
if x=xi then
RETURN (l,u,True)
else if x<xi then
RETURN (l,i,False)
else
RETURN (i+1,u,False)
}) (l,u,False);
RETURN r
}"
text ‹We try to synthesize the implementation. Note that @{command sepref_thm} behaves like
@{command sepref_definition}, but actually defines no constant. It only generates a refinement theorem.›
sepref_thm in_sorted_list2 is "uncurry in_sorted_list1'" :: "nat_assn⇧k *⇩a (array_assn nat_assn)⇧k →⇩a bool_assn"
unfolding in_sorted_list1'_def[abs_def]
apply sepref_dbg_keep
supply [[goals_limit=1]]
apply sepref_dbg_trans_keep
apply sepref_dbg_trans_step_keep
apply (sepref_dbg_side_keep)
oops
subsubsection ‹Internals of Sepref›
text ‹
Internally, @{method sepref} consists of multiple phases that are executed
one after the other. Each phase comes with its own debugging method, which
only executes that phase. We illustrate this by repeating the refinement of
@{const "min_of_list2"}. This time, we use @{command sepref_thm}, which only
generates a refinement theorem, but defines no constants:
›
sepref_thm min_of_list3' is min_of_list2 :: "(array_assn nat_assn)⇧k →⇩a nat_assn"
unfolding min_of_list2_def[abs_def]
apply sepref_dbg_preproc
apply sepref_dbg_cons_init
apply sepref_dbg_id
apply sepref_dbg_monadify
apply sepref_dbg_opt_init
apply sepref_dbg_trans
apply sepref_dbg_opt
apply sepref_dbg_cons_solve
apply sepref_dbg_cons_solve
apply sepref_dbg_constraints
done
text ‹In the next sections, we will explain, by example, how to troubleshoot
the various phases of the tool. We will focus on the phases that are most
likely to fail.›
subsubsection ‹Initialization›
text ‹A common mistake is to forget the keep/destroy markers for the
refinement assertion, or specify a refinement assertion with a non-matching
type. This results in a type-error on the command›
sepref_thm test_add_2 is "λx. RETURN (2+x)" :: "nat_assn⇧k →⇩a nat_assn"
by sepref
subsubsection ‹Translation Phase›
text ‹In most cases, the translation phase will fail. Let's try the following refinement:›
sepref_thm test is "λl. RETURN (l!1 + 2)" :: "(array_assn nat_assn)⇧k →⇩a nat_assn"
text ‹The @{method sepref} method will just fail. To investigate further, we use
@{method sepref_dbg_keep}, which executes the phases until the first one fails.
It returns with the proof state before the failed phase, and, moreover, outputs
a trace of the phases, such that you can easily see which phase failed.
›
apply sepref_dbg_keep
supply [[goals_limit = 1]]
apply sepref_dbg_trans_keep
apply sepref_dbg_trans_step_keep
apply sepref_dbg_side_keep
oops
text ‹Inserting an assertion into the abstract program solves the problem:›
sepref_thm test is "λl. ASSERT (length l > 1) ⪢ RETURN (l!1 + 2)" :: "(array_assn nat_assn)⇧k →⇩a nat_assn"
by sepref
text ‹Here is an example for an unimplemented operation:›
sepref_thm test is "λl. RETURN (Min (set l))" :: "(array_assn nat_assn)⇧k →⇩a nat_assn"
supply [[goals_limit = 1]]
apply sepref_dbg_keep
apply sepref_dbg_trans_keep
apply sepref_dbg_trans_step_keep
oops
subsection ‹The Isabelle Imperative Collection Framework (IICF)›
text ‹
The IICF provides a library of imperative data structures, and some
management infrastructure. The main idea is to have interfaces and implementations.
An interface specifies an abstract data type (e.g., @{typ "_ list"}) and some operations with preconditions
on it (e.g., @{term "(@)"} or @{term "nth"} with in-range precondition).
An implementation of an interface provides a refinement assertion from the abstract data type to
some concrete data type, as well as implementations for (a subset of) the interface's operations.
The implementation may add some more implementation specific preconditions.
The default interfaces of the IICF are in the folder ‹IICF/Intf›, and the standard implementations are in
‹IICF/Impl›.
›
subsubsection ‹Map Example›
text ‹Let's implement a function that maps a finite set to an initial
segment of the natural numbers
›
definition "nat_seg_map s ≡
ASSERT (finite s) ⪢ SPEC (λm. dom m = s ∧ ran m = {0..<card s})"
text ‹We implement the function by iterating over the set, and building the map›
definition "nat_seg_map1 s ≡ do {
ASSERT (finite s);
(m,_) ← FOREACHi (λit (m,i). dom m = s-it ∧ ran m = {0..<i} ∧ i=card (s - it))
s (λx (m,i). RETURN (m(x↦i),i+1)) (Map.empty,0);
RETURN m
}"
lemma nat_seg_map1_refine: "(nat_seg_map1, nat_seg_map) ∈ Id → ⟨Id⟩nres_rel"
apply (intro fun_relI)
unfolding nat_seg_map1_def[abs_def] nat_seg_map_def[abs_def]
apply (refine_vcg)
apply (vc_solve simp: it_step_insert_iff solve: asm_rl dest: domD)
done
text ‹We use hashsets @{term "hs.assn"} and hashmaps (@{term "hm.assn"}). ›
sepref_definition nat_seg_map2 is nat_seg_map1 :: "(hs.assn id_assn)⇧k →⇩a hm.assn id_assn nat_assn"
unfolding nat_seg_map1_def[abs_def]
apply sepref_dbg_keep
apply sepref_dbg_trans_keep
oops
text ‹
Assignment of implementations to constructor operations is done by rewriting them to
synonyms which are bound to a specific implementation. For hashmaps, we have
@{const op_hm_empty}, and the rules @{thm [source] hm.fold_custom_empty}.
›
sepref_definition nat_seg_map2 is nat_seg_map1 :: "(hs.assn id_assn)⇧k →⇩a hm.assn id_assn nat_assn"
unfolding nat_seg_map1_def[abs_def]
apply (rewrite in "FOREACHi _ _ _ ⌑" "hm.fold_custom_empty")
by sepref
export_code nat_seg_map2 checking SML
lemmas nat_seg_map2_correct = nat_seg_map2.refine[FCOMP nat_seg_map1_refine]
subsection ‹Specification of Preconditions›
text ‹In this example, we will discuss how to specify precondition of operations,
which are required for refinement to work.
Consider the following function, which increments all members of a list by one:
›
definition "incr_list l ≡ map ((+) 1) l"
text ‹We might want to implement it as follows›
definition "incr_list1 l ≡ fold (λi l. l[i:=1 + l!i]) [0..<length l] l"
lemma incr_list1_refine: "(incr_list1, incr_list)∈Id → Id"
proof (intro fun_relI; simp)
fix l :: "'a list"
{ fix n m
assume "n≤m" and "length l = m"
hence "fold (λi l. l[i:=1+l!i]) [n..<m] l = take n l @ map (((+))1) (drop n l)"
apply (induction arbitrary: l rule: inc_induct)
apply simp
apply (clarsimp simp: upt_conv_Cons take_Suc_conv_app_nth)
apply (auto simp add: list_eq_iff_nth_eq nth_Cons split: nat.split)
done
}
from this[of 0 "length l"] show "incr_list1 l = incr_list l"
unfolding incr_list_def incr_list1_def
by simp
qed
text ‹Trying to refine this reveals a problem:›
sepref_thm incr_list2 is "RETURN o incr_list1" :: "(array_assn nat_assn)⇧d →⇩a array_assn nat_assn"
unfolding incr_list1_def[abs_def]
apply sepref_dbg_keep
apply sepref_dbg_trans_keep
apply sepref_dbg_trans_step_keep
apply sepref_dbg_side_keep
oops
text ‹
Of course, the fold loop has the invariant that the length of the list does not change,
and thus, indexing is in range. We only cannot prove it during the automatic synthesis.
Here, the only solution is to do a manual refinement into the nres-monad,
and adding an assertion that indexing is always in range.
We use the @{const nfoldli} combinator, which generalizes @{const fold} in two directions:
▸ The function is inside the nres monad
▸ There is a continuation condition. If this is not satisfied, the fold returns immediately,
dropping the rest of the list.
›
definition "incr_list2 l ≡ nfoldli
[0..<length l]
(λ_. True)
(λi l. ASSERT (i<length l) ⪢ RETURN (l[i:=1+l!i]))
l"
text ‹
Note: Often, it is simpler to prove refinement of the abstract specification, rather
than proving refinement to some intermediate specification that may have already done
refinements "in the wrong direction". In our case, proving refinement of @{const incr_list1}
would require to generalize the statement to keep track of the list-length invariant,
while proving refinement of @{const incr_list} directly is as easy as proving the original
refinement for @{const incr_list1}.
›
lemma incr_list2_refine: "(incr_list2,RETURN o incr_list) ∈ Id → ⟨Id⟩nres_rel"
proof (intro nres_relI fun_relI; simp)
fix l :: "'a list"
show "incr_list2 l ≤ RETURN (incr_list l)"
unfolding incr_list2_def incr_list_def
apply (refine_vcg nfoldli_rule[where I="λl1 l2 s. s = map (((+))1) (take (length l1) l) @ drop (length l1) l"])
apply (vc_solve
simp: upt_eq_append_conv upt_eq_Cons_conv
simp: nth_append list_update_append upd_conv_take_nth_drop take_Suc_conv_app_nth
solve: asm_rl
)
done
qed
sepref_definition incr_list3 is "incr_list2" :: "(array_assn nat_assn)⇧d →⇩a array_assn nat_assn"
unfolding incr_list2_def[abs_def]
by sepref
lemmas incr_list3_correct = incr_list3.refine[FCOMP incr_list2_refine]
subsection ‹Linearity and Copying›
text ‹Consider the following implementation of an operation to swap to list
indexes. While it is perfectly valid in a functional setting, an imperative
implementation has a problem here: Once the update a index ‹i› is done,
the old value cannot be read from index ‹i› any more. We try to implement the
list with an array:›
sepref_thm swap_nonlinear is "uncurry2 (λl i j. do {
ASSERT (i<length l ∧ j<length l);
RETURN (l[i:=l!j, j:=l!i])
})" :: "(array_assn id_assn)⇧d *⇩a nat_assn⇧k *⇩a nat_assn⇧k →⇩a array_assn id_assn"
supply [[goals_limit = 1]]
apply sepref_dbg_keep
apply sepref_dbg_trans_keep
apply sepref_dbg_trans_step_keep
oops
text ‹The fix for our swap function is quite obvious. Using a temporary storage
for the intermediate value, we write:›
sepref_thm swap_with_tmp is "uncurry2 (λl i j. do {
ASSERT (i<length l ∧ j<length l);
let tmp = l!i;
RETURN (l[i:=l!j, j:=tmp])
})" :: "(array_assn id_assn)⇧d *⇩a nat_assn⇧k *⇩a nat_assn⇧k →⇩a array_assn id_assn"
by sepref
text ‹Note that also the argument must be marked as destroyed ‹()⇧d›. Otherwise, we get a similar error as above,
but in a different phase: ›
sepref_thm swap_with_tmp is "uncurry2 (λl i j. do {
ASSERT (i<length l ∧ j<length l);
let tmp = l!i;
RETURN (l[i:=l!j, j:=tmp])
})" :: "(array_assn id_assn)⇧k *⇩a nat_assn⇧k *⇩a nat_assn⇧k →⇩a array_assn id_assn"
apply sepref_dbg_keep
apply sepref_dbg_cons_solve_keep
oops
text ‹If copying is really required, you have to insert it manually.
Reconsider the example @{const incr_list} from above. This time,
we want to preserve the original data (note the ‹()⇧k› annotation):
›
sepref_thm incr_list3_preserve is "incr_list2" :: "(array_assn nat_assn)⇧k →⇩a array_assn nat_assn"
unfolding incr_list2_def[abs_def]
apply (rewrite in "nfoldli _ _ _ ⌑" op_list_copy_def[symmetric])
by sepref
subsection ‹Nesting of Data Structures›
text ‹
Sepref and the IICF support nesting of data structures with some limitations:
▪ Only the container or its elements can be visible at the same time.
For example, if you have a product of two arrays, you can either see the
two arrays, or the product. An operation like ‹snd› would have to destroy
the product, loosing the first component. Inside a case distinction, you
cannot access the compound object.
These limitations are somewhat relaxed for pure data types, which can always
be restored.
▪ Most IICF data structures only support pure component types.
Exceptions are HOL-lists, and the list-based set and multiset implementations
‹List_MsetO› and ‹List_SetO› (Here, the ‹O› stands for ‹own›, which means
that the data-structure owns its elements.).
›
text ‹Works fine:›
sepref_thm product_ex1 is "uncurry0 (do {
let p = (op_array_replicate 5 True, op_array_replicate 2 False);
case p of (a1,a2) ⇒ RETURN (a1!2)
})" :: "unit_assn⇧k →⇩a bool_assn"
by sepref
text ‹Fails: We cannot access compound type inside case distinction›
sepref_thm product_ex2 is "uncurry0 (do {
let p = (op_array_replicate 5 True, op_array_replicate 2 False);
case p of (a1,a2) ⇒ RETURN (snd p!1)
})" :: "unit_assn⇧k →⇩a bool_assn"
apply sepref_dbg_keep
apply sepref_dbg_trans_keep
apply sepref_dbg_trans_step_keep
oops
text ‹Works fine, as components of product are pure, such that product can be restored inside case.›
sepref_thm product_ex2 is "uncurry0 (do {
let p = (op_list_replicate 5 True, op_list_replicate 2 False);
case p of (a1,a2) ⇒ RETURN (snd p!1)
})" :: "unit_assn⇧k →⇩a bool_assn"
by sepref_dbg_keep
text ‹Trying to create a list of arrays, first attempt: ›
sepref_thm set_of_arrays_ex is "uncurry0 (RETURN (op_list_append [] op_array_empty))" :: "unit_assn⇧k →⇩a arl_assn (array_assn nat_assn)"
unfolding "arl.fold_custom_empty"
apply sepref_dbg_keep
apply sepref_dbg_trans_keep
apply sepref_dbg_trans_step_keep
supply [[goals_limit = 1, unify_trace_failure]]
oops
text ‹So lets choose a circular singly linked list (csll), which does not require its elements to be of default type class›
sepref_thm set_of_arrays_ex is "uncurry0 (RETURN (op_list_append [] op_array_empty))" :: "unit_assn⇧k →⇩a csll.assn (array_assn nat_assn)"
unfolding "csll.fold_custom_empty"
apply sepref_dbg_keep
apply sepref_dbg_trans_keep
apply sepref_dbg_trans_step_keep
oops
text ‹Finally, there are a few data structures that already support nested element types, for example, functional lists:›
sepref_thm set_of_arrays_ex is "uncurry0 (RETURN (op_list_append [] op_array_empty))" :: "unit_assn⇧k →⇩a list_assn (array_assn nat_assn)"
unfolding "HOL_list.fold_custom_empty"
by sepref
subsection ‹Fixed-Size Data Structures›
text ‹For many algorithms, the required size of a data structure is already known,
such that it is not necessary to use data structures with dynamic resizing.
The Sepref-tool supports such data structures, however, with some limitations.
›
subsubsection ‹Running Example›
text ‹
Assume we want to read a sequence of natural numbers in the range @{term "{0..<N}"},
and drop duplicate numbers. The following abstract algorithm may work:
›
definition "remdup l ≡ do {
(s,r) ← nfoldli l (λ_. True)
(λx (s,r). do {
ASSERT (distinct r ∧ set r ⊆ set l ∧ s = set r);
if x∈s then RETURN (s,r) else RETURN (insert x s, r@[x])
})
({},[]);
RETURN r
}"
text ‹We want to use ‹remdup› in our abstract code, so we have to register it.›
sepref_register remdup
text ‹The straightforward version with dynamic data-structures is: ›
sepref_definition remdup1 is "remdup" :: "(list_assn nat_assn)⇧k →⇩a arl_assn nat_assn"
unfolding remdup_def[abs_def]
apply (rewrite in "nfoldli _ _ _ ⌑" ias.fold_custom_empty)
apply (rewrite in "nfoldli _ _ _ ⌑" arl.fold_custom_empty)
by sepref
subsubsection ‹Initialization of Dynamic Data Structures›
text ‹Now let's fix an upper bound for the numbers in the list.
Initializations and statically sized data structures must always be fixed variables,
they cannot be computed inside the refined program.
TODO: Lift this restriction at least for initialization hints that do not occur
in the refinement assertions.
›
context fixes N :: nat begin
sepref_definition remdup1_initsz is "remdup" :: "(list_assn nat_assn)⇧k →⇩a arl_assn nat_assn"
unfolding remdup_def[abs_def]
apply (rewrite in "nfoldli _ _ _ ⌑" ias_sz.fold_custom_empty[of N])
apply (rewrite in "nfoldli _ _ _ ⌑" arl_sz.fold_custom_empty[of N])
by sepref
end
text ‹To get a usable function, we may add the fixed ‹N› as a parameter, effectively converting
the initialization hint to a parameter, which, however, has no abstract meaning›
definition "remdup_initsz (N::nat) ≡ remdup"
lemma remdup_init_hnr:
"(uncurry remdup1_initsz, uncurry remdup_initsz) ∈ nat_assn⇧k *⇩a (list_assn nat_assn)⇧k →⇩a arl_assn nat_assn"
using remdup1_initsz.refine unfolding remdup_initsz_def[abs_def]
unfolding hfref_def hn_refine_def
by (auto simp: pure_def)
subsubsection ‹Static Data Structures›
text ‹We use a locale to hide local declarations. Note: This locale will never be interpreted,
otherwise all the local setup, that does not make sense outside the locale, would become visible.
TODO: This is probably some abuse of locales to emulate complex private setup,
including declaration of constants and lemmas.
›
locale my_remdup_impl_loc =
fixes N :: nat
assumes "N>0"
begin
text ‹For locale hierarchies, the following seems not to be available directly in Isabelle,
however, it is useful when transferring stuff between the global theory and the locale›
lemma my_remdup_impl_loc_this: "my_remdup_impl_loc N" by unfold_locales
text ‹
Note that this will often require to use ‹N› as a usual constant, which
is refined. For pure refinements, we can use the @{attribute sepref_import_param}
attribute, which will convert a parametricity theorem to a rule for Sepref:
›
sepref_register N
lemma N_hnr[sepref_import_param]: "(N,N)∈nat_rel" by simp
thm N_hnr
text ‹Alternatively, we could directly prove the following rule, which, however, is
more cumbersome: ›
lemma N_hnr': "(uncurry0 (return N), uncurry0 (RETURN N))∈unit_assn⇧k →⇩a nat_assn"
by sepref_to_hoare sep_auto
text ‹Next, we use an array-list with a fixed maximum capacity.
Note that the capacity is part of the refinement assertion now.
›
sepref_definition remdup1_fixed is "remdup" :: "(list_assn nat_assn)⇧k →⇩a marl_assn N nat_assn"
unfolding remdup_def[abs_def]
apply (rewrite in "nfoldli _ _ _ ⌑" ias_sz.fold_custom_empty[of N])
apply (rewrite in "nfoldli _ _ _ ⌑" marl_fold_custom_empty_sz[of N])
supply [[goals_limit = 1]]
apply sepref_dbg_keep
apply sepref_dbg_trans_keep
apply sepref_dbg_trans_step_keep
oops
text ‹Moreover, we add a precondition on the list›
sepref_definition remdup1_fixed is "remdup" :: "[λl. set l ⊆ {0..<N}]⇩a (list_assn nat_assn)⇧k → marl_assn N nat_assn"
unfolding remdup_def[abs_def]
apply (rewrite in "nfoldli _ _ _ ⌑" ias_sz.fold_custom_empty[of N])
apply (rewrite in "nfoldli _ _ _ ⌑" marl_fold_custom_empty_sz[of N])
supply [[goals_limit = 1]]
apply sepref_dbg_keep
apply sepref_dbg_trans_keep
apply sepref_dbg_trans_step_keep
apply sepref_dbg_side_keep
oops
text ‹We can prove the remaining subgoal, e.g., by @{method auto} with the following
lemma declared as introduction rule:›
lemma aux1[intro]: "⟦ set l ⊂ {0..<N}; distinct l ⟧ ⟹ length l < N"
apply (simp add: distinct_card[symmetric])
apply (drule psubset_card_mono[rotated])
apply auto
done
text ‹We use some standard boilerplate to define the constant globally, although
being inside the locale. This is required for code-generation.›
sepref_thm remdup1_fixed is "remdup" :: "[λl. set l ⊆ {0..<N}]⇩a (list_assn nat_assn)⇧k → marl_assn N nat_assn"
unfolding remdup_def[abs_def]
apply (rewrite in "nfoldli _ _ _ ⌑" ias_sz.fold_custom_empty[of N])
apply (rewrite in "nfoldli _ _ _ ⌑" marl_fold_custom_empty_sz[of N])
by sepref
concrete_definition (in -) remdup1_fixed uses "my_remdup_impl_loc.remdup1_fixed.refine_raw" is "(?f,_)∈_"
prepare_code_thms (in -) remdup1_fixed_def
lemmas remdup1_fixed_refine[sepref_fr_rules] = remdup1_fixed.refine[OF my_remdup_impl_loc_this]
text ‹The @{command concrete_definition} command defines the constant globally, without any locale assumptions. For this,
it extracts the definition from the theorem, according to the specified pattern. Note, you have to
include the uncurrying into the pattern, e.g., ‹(uncurry ?f,_)∈_›.
The @{command prepare_code_thms} command sets up code equations for recursion combinators that may have been synthesized.
This is required as the code generator works with equation systems, while the heap-monad works with
fixed-point combinators.
Finally, the third lemma command imports the refinement lemma back into the locale, and registers it
as refinement rule for Sepref.
›
text ‹Now, we can refine @{const remdup} to @{term "remdup1_fixed N"} inside the
locale. The latter is a global constant with an unconditional definition, thus code
can be generated for it.›
text ‹Inside the locale, we can do some more refinements: ›
definition "test_remdup ≡ do {l ← remdup [0..<N]; RETURN (length l) }"
text ‹Note that the abstract @{const test_remdup} is just an abbreviation for
@{term "my_remdup_impl_loc.test_remdup N"}.
Whenever we want Sepref to treat a compound term like a constant, we have to wrap the term into
a @{const PR_CONST} tag. While @{command sepref_register} does this automatically,
the ‹PR_CONST› has to occur in the refinement rule.›
sepref_register "test_remdup"
sepref_thm test_remdup1 is
"uncurry0 (PR_CONST test_remdup)" :: "unit_assn⇧k →⇩a nat_assn"
unfolding test_remdup_def PR_CONST_def
by sepref
concrete_definition (in -) test_remdup1 uses my_remdup_impl_loc.test_remdup1.refine_raw is "(uncurry0 ?f,_)∈_"
prepare_code_thms (in -) test_remdup1_def
lemmas test_remdup1_refine[sepref_fr_rules] = test_remdup1.refine[of N]
end
text ‹Outside the locale, a refinement of @{term my_remdup_impl_loc.test_remdup} also makes sense,
however, with an extra argument @{term N}.›
thm test_remdup1.refine
lemma test_remdup1_refine_aux: "(test_remdup1, my_remdup_impl_loc.test_remdup) ∈ [my_remdup_impl_loc]⇩a nat_assn⇧k → nat_assn"
using test_remdup1.refine
unfolding hfref_def hn_refine_def
by (auto simp: pure_def)
text ‹We can also write a more direct precondition, as long as it implies the locale›
lemma test_remdup1_refine: "(test_remdup1, my_remdup_impl_loc.test_remdup) ∈ [λN. N>0]⇩a nat_assn⇧k → nat_assn"
apply (rule hfref_cons[OF test_remdup1_refine_aux _ entt_refl entt_refl entt_refl])
by unfold_locales
export_code test_remdup1 checking SML
text ‹We can also register the abstract constant and the refinement, to use it in further refinements›
sepref_register my_remdup_impl_loc.test_remdup
lemmas [sepref_fr_rules] = test_remdup1_refine
subsubsection ‹Static Data Structures with Custom Element Relations›
text ‹In the previous section, we have presented a refinement using an array-list
without dynamic resizing. However, the argument that we actually could append
to this array was quite complicated.
Another possibility is to use bounded refinement relations, i.e.,
a refinement relation intersected with a condition for the abstract object.
In our case, @{term "nbn_assn N"} relates natural numbers less than ‹N› to themselves.
We will repeat the above development, using the bounded relation approach:
›
definition "bremdup l ≡ do {
(s,r) ← nfoldli l (λ_. True)
(λx (s,r). do {
ASSERT (distinct r ∧ s = set r);
if x∈s then RETURN (s,r) else RETURN (insert x s, r@[x])
})
({},[]);
RETURN r
}"
sepref_register bremdup
locale my_bremdup_impl_loc =
fixes N :: nat
assumes "N>0"
begin
lemma my_bremdup_impl_loc_this: "my_bremdup_impl_loc N" by unfold_locales
sepref_register N
lemma N_hnr[sepref_import_param]: "(N,N)∈nat_rel" by simp
text ‹Conceptually, what we insert in our list are elements, and
these are less than ‹N›.›
abbreviation "elem_assn ≡ nbn_assn N"
lemma aux1[intro]: "⟦ set l ⊂ {0..<N}; distinct l ⟧ ⟹ length l < N"
apply (simp add: distinct_card[symmetric])
apply (drule psubset_card_mono[rotated])
apply auto
done
sepref_thm remdup1_fixed is "remdup" :: "[λl. set l ⊆ {0..<N}]⇩a (list_assn elem_assn)⇧k → marl_assn N elem_assn"
unfolding remdup_def[abs_def]
apply (rewrite in "nfoldli _ _ _ ⌑" ias_sz.fold_custom_empty[of N])
apply (rewrite in "nfoldli _ _ _ ⌑" marl_fold_custom_empty_sz[of N])
by sepref
concrete_definition (in -) bremdup1_fixed uses "my_bremdup_impl_loc.remdup1_fixed.refine_raw" is "(?f,_)∈_"
prepare_code_thms (in -) bremdup1_fixed_def
lemmas remdup1_fixed_refine[sepref_fr_rules] = bremdup1_fixed.refine[OF my_bremdup_impl_loc_this]
definition "test_remdup ≡ do {l ← remdup [0..<N]; RETURN (length l) }"
sepref_register "test_remdup"
text ‹This refinement depends on the (somewhat experimental) subtyping feature
to convert from @{term nat_assn} to @{term elem_assn}, based on context information›
sepref_thm test_remdup1 is
"uncurry0 (PR_CONST test_remdup)" :: "unit_assn⇧k →⇩a nat_assn"
unfolding test_remdup_def PR_CONST_def
by sepref
concrete_definition (in -) test_bremdup1 uses my_bremdup_impl_loc.test_remdup1.refine_raw is "(uncurry0 ?f,_)∈_"
prepare_code_thms (in -) test_bremdup1_def
lemmas test_remdup1_refine[sepref_fr_rules] = test_bremdup1.refine[of N]
end
lemma test_bremdup1_refine_aux: "(test_bremdup1, my_bremdup_impl_loc.test_remdup) ∈ [my_bremdup_impl_loc]⇩a nat_assn⇧k → nat_assn"
using test_bremdup1.refine
unfolding hfref_def hn_refine_def
by (auto simp: pure_def)
lemma test_bremdup1_refine: "(test_bremdup1, my_bremdup_impl_loc.test_remdup) ∈ [λN. N>0]⇩a nat_assn⇧k → nat_assn"
apply (rule hfref_cons[OF test_bremdup1_refine_aux _ entt_refl entt_refl entt_refl])
by unfold_locales
export_code test_bremdup1 checking SML
text ‹We can also register the abstract constant and the refinement, to use it in further refinements›
sepref_register test_bremdup: my_bremdup_impl_loc.test_remdup
lemmas [sepref_fr_rules] = test_bremdup1_refine
subsubsection ‹Fixed-Value Restriction›
text ‹Initialization only works with fixed values, not with dynamically computed values›
sepref_definition copy_list_to_array is "λl. do {
let N = length l;
let l' = op_arl_empty_sz N;
nfoldli l (λx. True) (λx s. mop_list_append s x) l'
}" :: "(list_assn nat_assn)⇧k →⇩a arl_assn nat_assn"
apply sepref_dbg_keep
apply sepref_dbg_trans_keep
apply sepref_dbg_trans_step_keep
supply [[unify_trace_failure, goals_limit=1]]
oops
subsubsection ‹Matrix Example›
text ‹
We first give an example for implementing point-wise matrix operations, using
some utilities from the (very prototype) matrix library.
Our matrix library uses functions @{typ "'a mtx"} (which is @{typ "nat×nat ⇒ 'a"})
as the abstract representation. The (currently only) implementation is by arrays,
mapping points at coordinates out of range to @{term 0}.
›
text ‹Pointwise unary operations are those that modify every point
of a matrix independently. Moreover, a zero-value must be mapped to a zero-value.
As an example, we duplicate every value on the diagonal of a matrix
›
text ‹Abstractly, we apply the following function to every value.
The first parameter are the coordinates.›
definition mtx_dup_diag_f:: "nat×nat ⇒ 'a::{numeral,times,mult_zero} ⇒ 'a"
where "mtx_dup_diag_f ≡ λ(i,j) x. if i=j then x*(2) else x"
text ‹We refine this function to a heap-function,
using the identity mapping for values.›
context
fixes dummy :: "'a::{numeral,times,mult_zero}"
notes [[sepref_register_adhoc "PR_CONST (2::'a)"]]
notes [sepref_import_param] = IdI[of "PR_CONST (2::'a)"]
notes [sepref_import_param] = IdI[of "(*)::'a⇒_", folded fun_rel_id_simp]
begin
sepref_definition mtx_dup_diag_f1 is "uncurry (RETURN oo (mtx_dup_diag_f::_⇒'a⇒_))" :: "(prod_assn nat_assn nat_assn)⇧k*⇩aid_assn⇧k →⇩a id_assn"
unfolding mtx_dup_diag_f_def
by sepref
end
text ‹Then, we instantiate the corresponding locale, to get an implementation for
array matrices. Note that we restrict ourselves to square matrices here: ›
interpretation dup_diag: amtx_pointwise_unop_impl N N mtx_dup_diag_f id_assn mtx_dup_diag_f1
apply standard
applyS (simp add: mtx_dup_diag_f_def) []
applyS (rule mtx_dup_diag_f1.refine)
done
text ‹We introduce an abbreviation for the abstract operation.
Note: We do not have to register it (this is done once and for all
for @{const mtx_pointwise_unop}), nor do we have to declare a refinement rule
(done by ‹amtx_pointwise_unop_impl›-locale)
›
abbreviation "mtx_dup_diag ≡ mtx_pointwise_unop mtx_dup_diag_f"
text ‹The operation is usable now:›
sepref_thm mtx_dup_test is "λm. RETURN (mtx_dup_diag (mtx_dup_diag m))" :: "(asmtx_assn N int_assn)⇧d →⇩a asmtx_assn N int_assn"
by sepref
text ‹Similarly, there are operations to combine to matrices, and to compare two matrices:›
interpretation pw_add: amtx_pointwise_binop_impl N M "(((+))::(_::monoid_add) ⇒ _)" id_assn "return oo ((+))"
for N M
apply standard
apply simp
apply (sepref_to_hoare) apply sep_auto
done
abbreviation "mtx_add ≡ mtx_pointwise_binop ((+))"
sepref_thm mtx_add_test is "uncurry2 (λm1 m2 m3. RETURN (mtx_add m1 (mtx_add m2 m3)))"
:: "(amtx_assn N M int_assn)⇧d *⇩a (amtx_assn N M int_assn)⇧d *⇩a (amtx_assn N M int_assn)⇧k →⇩a amtx_assn N M int_assn"
by sepref
text ‹A limitation here is, that the first operand is destroyed on a coarse-grained level.
Although adding a matrix to itself would be valid, our tool does not support this.
(However, you may use an unary operation)›
sepref_thm mtx_dup_alt_test is "(λm. RETURN (mtx_add m m))"
:: "(amtx_assn N M int_assn)⇧d →⇩a amtx_assn N M int_assn"
apply sepref_dbg_keep
apply sepref_dbg_trans_keep
apply sepref_dbg_trans_step_keep
oops
text ‹Of course, you can always copy the matrix manually:›
sepref_thm mtx_dup_alt_test is "(λm. RETURN (mtx_add (op_mtx_copy m) m))"
:: "(amtx_assn N M int_assn)⇧k →⇩a amtx_assn N M int_assn"
by sepref
text ‹A compare operation checks that all pairs of entries fulfill some property ‹f›, and
at least one entry fullfills a property ‹g›.›
interpretation pw_lt: amtx_pointwise_cmpop_impl N M "((≤)::(_::order) ⇒ _)" "((≠)::(_::order) ⇒ _)" id_assn "return oo (≤)" "return oo (≠)"
for N M
apply standard
apply simp
apply simp
apply (sepref_to_hoare) apply sep_auto
apply (sepref_to_hoare) apply sep_auto
done
abbreviation "mtx_lt ≡ mtx_pointwise_cmpop (≤) (≠)"
sepref_thm test_mtx_cmp is "(λm. do { RETURN (mtx_lt (op_amtx_dfltNxM N M 0) m) })" :: "(amtx_assn N M int_assn)⇧k →⇩a bool_assn"
by sepref
text ‹In a final example, we store some coordinates in a set, and then
use the stored coordinates to access the matrix again. This illustrates how
bounded relations can be used to maintain extra information, i.e., coordinates
being in range›
context
fixes N M :: nat
notes [[sepref_register_adhoc N M]]
notes [sepref_import_param] = IdI[of N] IdI[of M]
begin
text ‹We introduce an assertion for coordinates›
abbreviation "co_assn ≡ prod_assn (nbn_assn N) (nbn_assn M)"
text ‹And one for integer matrices›
abbreviation "mtx_assn ≡ amtx_assn N M int_assn"
definition "co_set_gen ≡ do {
nfoldli [0..<N] (λ_. True) (λi. nfoldli [0..<M] (λ_. True) (λj s.
if max i j - min i j ≤ 1 then RETURN (insert (i,j) s)
else RETURN s
)) {}
}"
sepref_definition co_set_gen1 is "uncurry0 co_set_gen" :: "unit_assn⇧k →⇩a hs.assn co_assn"
unfolding co_set_gen_def
apply (rewrite "hs.fold_custom_empty")
apply sepref_dbg_keep
apply sepref_dbg_trans_keep
oops
text ‹We can use a feature of Sepref, to annotate the desired assertion directly
into the abstract program. For this, we use @{thm [source] annotate_assn},
which inserts the (special) constant @{const ASSN_ANNOT}, which is just identity,
but enforces refinement with the given assertion.›
sepref_definition co_set_gen1 is "uncurry0 (PR_CONST co_set_gen)" :: "unit_assn⇧k →⇩a hs.assn co_assn"
unfolding co_set_gen_def PR_CONST_def
apply (rewrite "hs.fold_custom_empty")
apply (rewrite in "insert ⌑ _" annotate_assn[where A=co_assn])
by sepref
lemmas [sepref_fr_rules] = co_set_gen1.refine
sepref_register "co_set_gen"
text ‹Now we can use the entries from the set as coordinates,
without any worries about them being out of range›
sepref_thm co_set_use is "(λm. do {
co ← co_set_gen;
FOREACH co (λ(i,j) m. RETURN ( m((i,j) := 1))) m
})" :: "mtx_assn⇧d →⇩a mtx_assn"
by sepref
end
subsection ‹Type Classes›
text ‹TBD›
subsection ‹Higher-Order›
text ‹TBD›
subsection ‹A-Posteriori Optimizations›
text ‹The theorem collection @{attribute sepref_opt_simps}
and @{attribute sepref_opt_simps2} contain simplifier lemmas that are
applied, in two stages, to the generated Imperative/HOL program.
This is the place where some optimizations, such as deforestation, and
simplifying monad-expressions using the monad laws, take place.
›
thm sepref_opt_simps
thm sepref_opt_simps2
subsection ‹Short-Circuit Evaluation›
text ‹Consider›
sepref_thm test_sc_eval is "RETURN o (λl. length l > 0 ∧ hd l)" :: "(list_assn bool_assn)⇧k →⇩a bool_assn"
apply sepref_dbg_keep
apply sepref_dbg_trans_keep
apply sepref_dbg_trans_step_keep
oops
sepref_thm test_sc_eval is "RETURN o (λl. length l > 0 ∧ hd l)" :: "(list_assn bool_assn)⇧k →⇩a bool_assn"
unfolding short_circuit_conv
by sepref
end
Theory Sepref_Guide_Reference
section ‹Reference Guide›
theory Sepref_Guide_Reference
imports "../IICF/IICF"
begin
text ‹This guide contains a short reference of the most
important Sepref commands, methods, and attributes, as well as
a short description of the internal working, and troubleshooting information
with examples.
Note: To get an impression how to actually use the Sepref-tool, read the
quickstart guide first!
›
subsection ‹The Sepref Method›
text ‹The @{method sepref} method is the central method of the tool.
Given a schematic goal of the form ‹hn_refine Γ ?c ?Γ' ?R f›, it tries
to synthesize terms for the schematics and prove the theorem. Note that the
‹?Γ'› and ‹?R› may also be fixed terms, in which case frame inference is used
to match the generated assertions with the given ones.
‹Γ› must contain a description of the available refinements on the heap, the
assertion for each variable must be marked with a ‹hn_ctxt› tag.
Alternatively, a term of the form ‹(?c,f)∈[P]⇩a A→R› is accepted, where ‹A›
describes the refinement and preservation of the arguments, and ‹R› the refinement
of the result. ‹f› must be in uncurried form (i.e. have exactly one argument).
We give some very basic examples here. In practice, you would almost always use
the higher-level commands @{command sepref_definition} and @{command sepref_register}.
›
text ‹In its most primitive form, the Sepref-tool is applied like this:›
schematic_goal
notes [id_rules] = itypeI[of x "TYPE(nat)"] itypeI[of a "TYPE(bool list)"]
shows "hn_refine
(hn_ctxt nat_assn x xi * hn_ctxt (array_assn bool_assn) a ai)
(?c::?'c Heap) ?Γ' ?R
(do { ASSERT (x<length a); RETURN (a!x) })"
by sepref
text ‹The above command asks Sepref to synthesize a program, in a heap context where there
is a natural number, refined by ‹nat_assn›, and a list of booleans, refined
by ‹array_assn bool_assn›. The ‹id_rules› declarations declare the abstract variables to the
operation identification heuristics, such that they are recognized as operands.›
text ‹Using the alternative hfref-form, we can write:›
schematic_goal "(uncurry (?c), uncurry (λx a. do {ASSERT (x<length a); RETURN (a!x)}))
∈ nat_assn⇧k *⇩a (array_assn bool_assn)⇧k →⇩a bool_assn"
by sepref
text ‹This uses the specified assertions to derive the rules for
operation identification automatically. For this, it uses the
assertion-interface bindings declared in @{attribute intf_of_assn}.
If there is no such binding, it uses the HOL type as interface type.
›
thm intf_of_assn
text ‹
The sepref-method is split into various phases, which we will explain now
›
subsubsection ‹Preprocessing Phase›
text ‹
This tactic converts a goal in ‹hfref› form to the more basic ‹hn_refine› form.
It uses the theorems from @{attribute intf_of_assn} to add interface type declarations
for the generated operands. The final result is massaged by rewriting with
@{attribute to_hnr_post}, and then with @{attribute sepref_preproc}.
Moreover, this phase ensures that there is a constraint slot goal (see section on constraints).
›
text ‹The method @{method sepref_dbg_preproc} gives direct access to the preprocessing phase.›
thm sepref_preproc
thm intf_of_assn
thm to_hnr_post
subsubsection ‹Consequence Rule Phase›
text ‹This phase rewrites ‹hn_invalid _ x y› assertions in the postcondition to
‹hn_ctxt (λ_ _. true) x y› assertions, which are trivial to discharge.
Then, it applies @{thm [source] CONS_init}, to make postcondition and
result relation schematic, and introduce (separation logic) implications to
the originals, which are discharged after synthesis.
›
text ‹Use @{method sepref_dbg_cons_init} for direct access to this phase.
The method @{method weaken_hnr_post} performs the rewriting of ‹hn_invalid›
to ‹λ_ _. true› postconditions, and may be useful on its own for proving
combinator rules.
›
subsubsection ‹Operation Identification Phase›
text ‹The purpose of this phase is to identify the conceptual operations in the given program.
Consider, for example, a map @{term_type "m::'k⇀'v"}.
If one writes @{term "m(k↦v)"}, this is a map update. However, in Isabelle/HOL maps
are encoded as functions @{typ "'k ⇒ 'v option"}, and the map update is just syntactic
sugar for @{term [source] "fun_upd m k (Some v)"}. And, likewise, map lookup is just
function application.
However, the Sepref tool must be able to distinguish between maps and functions into the
option type, because maps shall be refined, to e.g., hash-tables, while functions into the
option type shall be not. Consider, e.g., the term @{term "Some x"}. Shall ‹Some› be
interpreted as the constructor of the option datatype, or as a map, mapping each element to
itself, and perhaps be implemented with a hashtable.
Moreover, for technical reasons, the translation phase of Sepref expects each operation
to be a single constant applied to its operands. This criterion is neither matched by map
lookup (no constant, just application of the first to the second operand), nor map update
(complex expression, involving several constants).
The operation identification phase uses a heuristics to find the conceptual types in a term
(e.g., discriminate between map and function to option), and rewrite the operations to single
constants (e.g. @{const op_map_lookup} for map lookup). The heuristics is a type-inference
algorithm combined with rewriting. Note that the inferred conceptual type does not necessarily
match the HOL type, nor does it have a semantic meaning, other than guiding the heuristics.
The heuristics store a set of typing rules for constants, in @{attribute id_rules}.
Moreover, it stores two sets of rewrite rules, in @{attribute pat_rules}
and @{attribute def_pat_rules}. A term is typed by first trying to apply a rewrite rule, and
then applying standard Hindley-Milner type inference rules for application and abstraction.
Constants (and free variables) are typed
using the ‹id_rules›. If no rule for a constant exists, one is inferred from the constant's
signature. This does not work for free variables, such that rules must be available
for all free variables. Rewrite rules from ‹pat_rules› are backtracked over, while rewrite rules
from ‹def_pat_rules› are always tried first and never backtracked over.
If typing succeeds, the result is the rewritten term.
For example, consider the type of maps. Their interface (or conceptual) type is
@{typ "('k,'v) i_map"}. The ‹id_rule› for map lookup is @{thm "op_map_lookup.itype"}.
Moreover, there is a rule to rewrite function application to map lookup (@{thm pat_map_lookup}).
It can be backtracked over, such that also functions into the option type are possible.
›
thm op_map_lookup.itype
thm pat_map_lookup
thm id_rules
text ‹
The operation identification phase, and all further phases, work on a tagged
version of the input term, where all function applications are replaced by the
tagging constant @{term "($)"}, and all abstractions are replaced by
@{term "λx. PROTECT2 (t x) DUMMY"} (syntax: @{term "λx. (#t x#)"},
input syntax: @{term "λ⇩2x. t x"}). This is required to tame Isabelle's
higher-order unification. However, it makes tagged terms quite unreadable, and it
may be helpful to ‹unfold APP_def PROTECT2_def› to get back the untagged form when inspecting
internal states for debugging purposes.
To prevent looping, rewrite-rules can use @{term "($')"} on the RHS. This is
a synonym for @{term "($)"}, and gets rewritten to @{term "($)"} after the operation
identification phase. During the operation identification phase, it prevents infinite
loops of pattern rewrite rules.
Interface type annotations can be added to the term using @{const CTYPE_ANNOT}
(syntax @{term "t:::⇩iTYPE('a)"}).
In many cases, it is desirable to treat complex terms as a single constant,
a standard example are constants defined inside locales, which may have locale
parameters attached. Those terms can be wrapped into an @{const PR_CONST} tag,
which causes them to be treated like a single constant. Such constants must always
have ‹id_rules›, as the interface type inference from the signature does not apply here.
›
subsubsection ‹Troubleshooting Operation Identification›
text ‹
If the operation identification fails, in most cases one has forgotten to register
an ‹id_rule› for a free variable or complex ‹PR_CONST› constant, or the identification
rule is malformed. Note that, in practice, identification rules are registered by
the @{command sepref_register} (see below), which catches many malformed rules, and
handles ‹PR_CONST› tagging automatically. Another frequent source of errors here is
forgetting to register a constant with a conceptual type other than its signature.
In this case, operation identification gets stuck trying to unify the signature's type with
the interface type, e.g., @{typ "'k ⇒ 'v option"} with @{typ "('k,'v)i_map"}.
The method @{method sepref_dbg_id} invokes the id-phase in isolation.
The method @{method sepref_dbg_id_keep} returns the internal state where type
inference got stuck. It returns a sequence of all stuck states, which can be inspected
using @{command back}.
The methods @{method sepref_dbg_id_init},@{method sepref_dbg_id_step},
and @{method sepref_dbg_id_solve} can be used to single-step the operation
identification phase. Here, solve applies single steps until the current subgoal is discharged.
Be aware that application of single steps allows no automatic backtracking, such that backtracking
has to be done manually.
›
text ‹Examples for identification errors›
context
fixes N::nat
notes [sepref_import_param] = IdI[of N]
begin
sepref_thm N_plus_2_example is "uncurry0 (RETURN (N+2))" :: "unit_assn⇧k →⇩a nat_assn"
apply sepref_dbg_keep
apply sepref_dbg_id_keep
oops
text ‹Solution: Register ‹n›, be careful not to export meaningless registrations from context!›
context
notes [[sepref_register_adhoc N]]
begin
sepref_thm N_plus_2_example is "uncurry0 (RETURN (N+2))" :: "unit_assn⇧k →⇩a nat_assn" by sepref
end
end
definition "my_map ≡ op_map_empty"
lemmas [sepref_fr_rules] = hm.empty_hnr[folded my_map_def]
sepref_thm my_map_example is "uncurry0 (RETURN (my_map(False↦1)))" :: "unit_assn⇧k →⇩a hm.assn bool_assn nat_assn"
apply sepref_dbg_keep
apply sepref_dbg_trans_keep
oops
text ‹Solution: Register with correct interface type›
sepref_register my_map :: "('k,'v) i_map"
sepref_thm my_map_example is "uncurry0 (RETURN (my_map(False↦1)))" :: "unit_assn⇧k →⇩a hm.assn bool_assn nat_assn"
by sepref
subsubsection ‹Monadify Phase›
text ‹
The monadify phase rewrites the program such that every operation becomes
visible on the monad level, that is, nested HOL-expressions are flattened.
Also combinators (e.g. if, fold, case) may get flattened, if special rules
are registered for that.
Moreover, the monadify phase fixes the number of operands applied to an operation,
using eta-expansion to add missing operands.
Finally, the monadify phase handles duplicate parameters to an operation, by
inserting a @{const COPY} tag. This is necessary as our tool expects the
parameters of a function to be separate, even for read-only
parameters@{footnote ‹Using fractional permissions or some other more fine grained
ownership model might lift this restriction in the future.›}.
›
text ‹The monadify phase consists of a number of sub-phases.
The method @{method sepref_dbg_monadify} executes the monadify phase,
the method @{method sepref_dbg_monadify_keep} stops at a failing sub-phase
and presents the internal goal state before the failing sub-phase.
›
subsubsection ‹Monadify: Arity›
text ‹In the first sub-phase, the rules from @{attribute sepref_monadify_arity}
are used to standardize the number of operands applied to a constant.
The rules work by rewriting each constant to a lambda-expression with the
desired number of arguments, and the using beta-reduction to account for
already existing arguments. Also higher-order arguments can be enforced,
for example, the rule for fold enforces three arguments, the function itself
having two arguments (@{thm fold_arity}).
In order to prevent arity rules being applied infinitely often,
the @{const SP} tag can be used on the RHS. It prevents anything inside
from being changed, and gets removed after the arity step.
The method @{method sepref_dbg_monadify_arity} gives you direct access to this phase.
In the Sepref-tool, we use the terminology @{emph ‹operator/operation›} for a function that
only has first-order arguments, which are evaluated before the function is applied (e.g. @{term "(+)"}),
and @{emph ‹combinator›} for operations with higher-order arguments or custom
evaluation orders (e.g. @{term "fold"}, @{term "If"}).
Note: In practice, most arity (and combinator) rules are declared automatically
by @{command sepref_register} or @{command sepref_decl_op}. Manual declaration
is only required for higher-order functions.
›
thm sepref_monadify_arity
subsubsection ‹Monadify: Combinators›
text ‹The second sub-phase flattens the term.
It has a rule for every function into @{typ "_ nres"} type, that determines
the evaluation order of the arguments. First-order arguments are evaluated before
an operation is applied. Higher-order arguments are treated specially, as they
are evaluated during executing the (combinator) operation. The rules are in
@{attribute sepref_monadify_comb}.
Evaluation of plain (non-monadic) terms is triggered by wrapping them into
the @{const EVAL} tag. The @{attribute sepref_monadify_comb} rules may also contain
rewrite-rules for the @{const EVAL} tag, for example to unfold plain combinators
into the monad (e.g. @{thm dflt_plain_comb}). If no such rule applies, the
default method is to interpret the head of the term as a function, and recursively
evaluate the arguments, using left-to-right evaluation order. The head of
a term inside @{const EVAL} must not be an abstraction. Otherwise, the
@{const EVAL} tag remains in the term, and the next sub-phase detects this
and fails.
The method @{method sepref_dbg_monadify_comb} executes the combinator-phase
in isolation.
›
subsubsection ‹Monadify: Check-Eval›
text ‹This phase just checks for remaining @{const EVAL} tags in the term,
and fails if there are such tags. The method @{method sepref_dbg_monadify_check_EVAL}
gives direct access to this phase.
Remaining @{const EVAL} tags indicate
higher-order functions without an appropriate setup of the combinator-rules
being used. For example:
›
definition "my_fold ≡ fold"
sepref_thm my_fold_test is "λl. do { RETURN (my_fold (λx y. x+y*2) l 0)}" :: "(list_assn nat_assn)⇧k→⇩anat_assn"
apply sepref_dbg_keep
apply sepref_dbg_monadify_keep
oops
text ‹Solution: Register appropriate arity and combinator-rules›
lemma my_fold_arity[sepref_monadify_arity]: "my_fold ≡ λ⇩2f l s. SP my_fold$(λ⇩2x s. f$x$s)$l$s" by auto
text ‹The combinator-rule rewrites to the already existing and set up combinator @{term nfoldli}:›
lemma monadify_plain_my_fold[sepref_monadify_comb]:
"EVAL$(my_fold$(λ⇩2x s. f x s)$l$s) ≡ (⤜)$(EVAL$l)$(λ⇩2l. (⤜)$(EVAL$s)$(λ⇩2s. nfoldli$l$(λ⇩2_. True)$(λ⇩2x s. EVAL$(f x s))$s))"
by (simp add: fold_eq_nfoldli my_fold_def)
sepref_thm my_fold_test is "λl. do { RETURN (my_fold (λx y. x+y*2) l 0)}" :: "(list_assn nat_assn)⇧k→⇩anat_assn"
by sepref
subsubsection ‹Monadify: Dup›
text ‹The last three phases, ‹mark_params›, ‹dup›, ‹remove_pass› are to detect
duplicate parameters, and insert ‹COPY› tags.
The first phase, ‹mark_params›, adds @{const PASS} tags around all parameters.
Parameters are bound variables and terms that have a refinement in the
precondition.
The second phase detects duplicate parameters and inserts @{const COPY} tags
to remove them. Finally, the last phase removes the @{const PASS} tags again.
The methods @{method sepref_dbg_monadify_mark_params},
@{method sepref_dbg_monadify_dup}, and @{method sepref_dbg_monadify_remove_pass}
gives you access to these phases.
›
subsubsection ‹Monadify: Step-Through Example›
text ‹
We give an annotated example of the monadify phase.
Note that the program utilizes a few features of monadify:
▪ The fold function is higher-order, and gets flattened
▪ The first argument to fold is eta-contracted. The missing argument is added.
▪ The multiplication uses the same argument twice. A copy-tag is inserted.
›
sepref_thm monadify_step_thru_test is "λl. do {
let i = length l;
RETURN (fold (λx. (+) (x*x)) l i)
}" :: "(list_assn nat_assn)⇧k →⇩a nat_assn"
apply sepref_dbg_preproc
apply sepref_dbg_cons_init
apply sepref_dbg_id
apply sepref_dbg_monadify_arity
apply sepref_dbg_monadify_comb
apply sepref_dbg_monadify_check_EVAL
apply sepref_dbg_monadify_mark_params
apply sepref_dbg_monadify_dup
apply sepref_dbg_monadify_remove_pass
apply sepref_dbg_opt_init
apply sepref_dbg_trans
apply sepref_dbg_opt
apply sepref_dbg_cons_solve
apply sepref_dbg_cons_solve
apply sepref_dbg_constraints
done
subsubsection ‹Optimization Init Phase›
text ‹This phase, accessed by @{method sepref_dbg_opt_init}, just applies the
rule @{thm TRANS_init} to set up a subgoal for a-posteriori optimization›
subsubsection ‹Translation Phase›
text ‹
The translation phase is the main phase of the Sepref tool.
It performs the actual synthesis of the imperative program from
the abstract one. For this, it integrates various components, among others,
a frame inference tool, a semantic side-condition solver and a monotonicity prover.
The translation phase consists of two major sub-phases:
Application of translation rules and solving of deferred constraints.
The method @{method sepref_dbg_trans} executes the translation phase,
@{method sepref_dbg_trans_keep} executes the translation phase,
presenting the internal goal state of a failed sub-phase.
The translation rule phase repeatedly applies translation steps, until the
subgoal is completely solved.
The main idea of the translation phase is, that for every abstract variable ‹x› in scope,
the precondition contains an assertion of the form @{term "hn_ctxt A x xi"}, indicating how
this variable is implemented. Common abbreviations are
@{term "hn_val R x xi ≡ hn_ctxt (pure R) x xi"}
and @{term "hn_invalid A x xi ≡ hn_ctxt (invalid_assn A) x xi"}.
›
subsubsection ‹Translation: Step›
text ‹
A translation step applies a single synthesis step for an operator,
or solves a deferred side-condition.
There are two types of translation steps: Combinator steps and operator steps.
A combinator step consists of applying a rule from @{attribute sepref_comb_rules}
to the goal-state. If no such rule applies, the rules are tried again after rewriting
the precondition with @{attribute sepref_frame_normrel_eqs} (see frame-inference).
The premises of the combinator rule become new subgoals, which are solved by
subsequent steps. No backtracking is applied over combinator rules.
This restriction has been introduced to make the tool more deterministic, and hence
more manageable.
An operator step applies an operator rule (from @{attribute sepref_fr_rules})
with frame-inference, and then tries to solve the resulting side conditions
immediately. If not all side-conditions can be solved, it backtracks over the
application of the operator rule.
Note that, currently, side conditions to operator rules cannot contain
synthesis goals themselves. Again, this restriction reduces the tool's
complexity by avoiding deep nesting of synthesis. However, it hinders
the important feature of generic algorithms, where an operation can issue
synthesis subgoals for required operations it is built from (E.g., set union
can be implemented by insert and iteration). Our predecessor tool, Autoref,
makes heavy use of this feature, and we consider dropping the restriction in
the near future.
An operator-step itself consists of several sub-phases:
➧[Align goal] Splits the precondition into the arguments actually occurring in
the operation, and the rest (called frame).
➧[Frame rule] Applies a frame rule to focus on the actual arguments. Moreover,
it inserts a subgoal of the form @{term "RECOVER_PURE Γ Γ'"}, which is used
to restore invalidated arguments if possible. Finally, it generates an assumption
of the form @{term "vassn_tag Γ'"}, which means that the precondition holds
on some heap. This assumption is used to extract semantic information from the
precondition during side-condition solving.
➧[Recover pure] This phase tries to recover invalidated arguments.
An invalidated argument is one that has been destroyed by a previous operation.
It occurs in the precondition as @{term "hn_invalid A x xi"}, which indicates
that there exists a heap where the refinement holds. However, if the refinement
assertion ‹A› does not depend on the heap (is ∗‹pure›), the invalidated argument
can be recovered. The purity assumption is inserted as a constraint (see constraints),
such that it can be deferred.
➧[Apply rule] This phase applies a rule from @{attribute sepref_fr_rules} to
the subgoal. If there is no matching rule, matching is retried after rewriting
the precondition with @{attribute sepref_frame_normrel_eqs}. If this does not succeed
either, a consequence rule is used on the precondition. The implication becomes an
additional side condition, which will be solved by the frame inference tool.
To avoid too much backtracking, the new precondition
is massaged to have the same structure as the old one, i.e., it contains a (now schematic)
refinement assertion for each operand. This excludes rules for which the frame inference
would fail anyway.
If a matching rule is found, it is applied and all new subgoals are solved by the
side-condition solver. If this fails, the tool backtracks over the application of
the @{attribute sepref_fr_rules}-rules. Note that direct matches prevent precondition
simplification, and matches after precondition simplification prevent the consequence
rule to be applied.
The method @{method sepref_dbg_trans_step} performs a single translation step.
The method @{method sepref_dbg_trans_step_keep} presents the internal goal state
on failure. If it fails in the ‹apply-rule› phase, it presents the sequence of
states with partially unsolved side conditions for all matching rules.
›
subsubsection ‹Translation: Side Conditions›
text ‹The side condition solver is used to discharge goals that arise as
side-conditions to the translation rules. It does a syntactic discrimination
of the side condition type, and then invokes the appropriate solver. Currently,
it supports the following side conditions:
➧[Merge] (‹_∨⇩A_ ⟹⇩t _›). These are used to merge postconditions from different
branches of the program (e.g. after an if-then-else). They are solved by the
frame inference tool (see section on frame inference).
➧[Frame] (‹_ ⟹⇩t _›). Used to match up the current precondition against the
precondition of the applied rule. Solved by the frame inference tool (see section on frame inference).
➧[Independence] (‹INDEP (?R x⇩1 … x⇩n)›). Deprecated. Used to instantiate a
schematic variable such that it does not depend on any bound variables any more.
Originally used to make goals more readable, we are considering of dropping this.
➧[Constraints] (‹CONSTRAINT _ _›) Apply solver for deferrable constraints (see section on constraints).
➧[Monotonicity] (‹mono_Heap _›) Apply monotonicity solver. Monotonicity subgoals occur when
translating recursion combinators. Monadic expressions are monotonic by construction, and
this side-condition solver just forwards to the monotonicity prover of the partial
function package, after stripping any preconditions from the subgoal, which are
not supported by the case split mechanism of the monotonicity prover (as of Isabelle2016).
➧[Prefer/Defer] (‹PREFER_tag _›/‹DEFER_tag›). Deprecated. Invoke the tagged solver of
the Autoref tool. Used historically for importing refinements from the Autoref tool,
but as Sepref becomes more complete imports from Autoref are not required any more.
➧[Resolve with Premise] ‹RPREM _› Resolve subgoal with one of its premises.
Used for translation of recursion combinators.
➧[Generic Algorithm] ‹GEN_ALGO _ _› Triggers resolution with a rule from
@{attribute sepref_gen_algo_rules}. This is a poor-man's version of generic
algorithm, which is currently only used to synthesize to-list conversions for foreach-loops.
➧[Fallback] (Any pattern not matching the above, nor being a ‹hn_refine› goal).
Unfolds the application and abstraction tagging, as well as @{term bind_ref_tag} tags
which are inserted by several translation rules to indicate the value a variable has
been bound to, and then tries to solve the goal by @{method auto}, after freezing
schematic variables. This tactic is used to discharge semantic side conditions, e.g.,
in-range conditions for array indexing.
Methods: @{method sepref_dbg_side} to apply a side-condition solving step,
@{method sepref_dbg_side_unfold} to apply the unfolding of application and binding tags and
@{method sepref_dbg_side_keep} to return the internal state after failed side-condition solving.
›
subsubsection ‹Translation: Constraints›
text ‹During the translation phase, the refinement of operands is not
always known immediately, such that schematic variables may occur as refinement
assertions. Side conditions on those refinement assertions cannot be discharged
until the schematic variable gets instantiated.
Thus, side conditions may be tagged with @{const CONSTRAINT}.
If the side condition solver encounters a constraint side condition, it first removes
the constraint tag (@{thm CONSTRAINT_I}) and freezes all schematic variables to prevent them from
accidentally getting instantiated. Then it simplifies with @{attribute constraint_simps} and
tries to solve the goal using rules from
@{attribute safe_constraint_rules} (no backtracking)
and @{attribute constraint_rules} (with backtracking).
If solving the constraint is not successful, only the safe rules are applied, and the
remaining subgoals are moved to a special ‹CONSTRAINT_SLOT› subgoal, that always is the
last subgoal, and is initialized by the preprocessing phase of Sepref.
Moving the subgoal to the constraint slot looks for Isabelle's tacticals like the subgoal
has been solved. In reality, it is only deferred and must be solved later.
Constraints are used in several phases of Sepref, and all constraints are solved
at the end of the translation phase, and at the end of the Sepref invocation.
Methods:
▪ @{method solve_constraint} to apply constraint solving, the @{const CONSTRAINT}-tag is optional.
▪ @{method safe_constraint} to apply safe rules, the @{const CONSTRAINT}-tag is optional.
▪ @{method print_slot} to print the contents of the constraint slot.
›
subsubsection ‹Translation: Merging and Frame Inference›
text ‹Frame inference solves goals of the form ‹Γ ⟹⇩t Γ'›.
For this, it matches ‹hn_ctxt› components in ‹Γ'› with those in ‹Γ›.
Matching is done according to the refined variables.
The matching pairs and the rest is then treated differently:
The rest is resolved by repeatedly applying the rules from @{thm frame_rem_thms}.
The matching pairs are resolved by repeatedly applying rules from
@{thm frame_thms} and @{attribute sepref_frame_match_rules}.
Any non-frame premise of these rules must be solved immediately by the
side-condition's constraint or fallback tactic (see above). The tool backtracks over rules.
If no rule matches (or side-conditions cannot be solved), it simplifies the goal
with @{attribute sepref_frame_normrel_eqs} and tries again.
For merge rules, the theorems @{thm merge_thms}
and @{attribute sepref_frame_merge_rules} are used.
Note that a smart setup of frame and match rules together with side conditions makes
the frame matcher a powerful tool for encoding structural and semantic information
into relations. An example for structural information are the match rules for lists,
which forward matching of list assertions to matching of the element assertions,
maintaining the congruence assumption that the refined elements are actually elements
of the list: @{thm list_match_cong}.
An example for semantic information is the bounded assertion, which intersects
any given assertion with a predicate on the abstract domain. The frame matcher is
set up such that it can convert between bounded assertions, generating semantic
side conditions to discharge implications between bounds (@{thm b_assn_subtyping_match}).
This is essentially a subtyping mechanism on the level of refinement assertions,
which is quite useful for maintaining natural side conditions on operands.
A standard example is to maintain a list of array indices: The refinement assertion
for array indices is @{term nat_assn} restricted to indices that are in range:
@{term "nbn_assn N"}. When inserting natural numbers into this list, one has to
prove that they are actually in range (conversion from @{term nat_assn} to @{term nbn_assn}).
Elements of the list can be used as natural numbers (conversion from @{term nbn_assn}
to @{term nat_assn}). Additionally, the side condition solver can derive that the predicate
holds on the abstract variable (via the @{const vassn_tag} inserted by the operator steps).
›
subsubsection ‹Translation: Annotated Example›
context
fixes N::nat
notes [[sepref_register_adhoc N]]
notes [sepref_import_param] = IdI[of N]
begin
text ‹This worked example utilizes the following features of the translation phase:
▪ We have a fold combinator, which gets translated by its combinator rule
▪ We add a type annotation which enforces converting the natural numbers
inserted into the list being refined by ‹nbn_assn N›, i.e., smaller than ‹N›.
▪ We can only prove the numbers inserted into the list to be smaller than ‹N›
because the combinator rule for ‹If› inserts congruence assumptions.
▪ By moving the elements from the list to the set, they get invalidated.
However, as ‹nat_assn› is pure, they can be recovered later, allowing us to
mark the list argument as read-only.
›
sepref_thm filter_N_test is "λl. RETURN (fold (λx s.
if x<N then insert (ASSN_ANNOT (nbn_assn N) x) s else s
) l op_hs_empty)" :: "(list_assn nat_assn)⇧k →⇩a hs.assn (nbn_assn N)"
apply sepref_dbg_preproc
apply sepref_dbg_cons_init
apply sepref_dbg_id
apply sepref_dbg_monadify
apply sepref_dbg_opt_init
apply sepref_dbg_trans_step
apply sepref_dbg_trans_step
apply sepref_dbg_trans_step
apply sepref_dbg_trans_step
apply sepref_dbg_trans_step
apply sepref_dbg_trans_step
apply sepref_dbg_trans_step
apply sepref_dbg_trans_step
apply sepref_dbg_trans_step
apply sepref_dbg_trans_step
apply sepref_dbg_trans_step
apply sepref_dbg_trans_step
apply sepref_dbg_trans_step
apply sepref_dbg_trans_step
apply sepref_dbg_trans_step
apply sepref_dbg_trans_step
apply sepref_dbg_trans_step
apply sepref_dbg_trans_step
apply sepref_dbg_trans_step
apply sepref_dbg_trans_step
apply sepref_dbg_trans_step
apply sepref_dbg_trans_step
apply sepref_dbg_trans_step
apply sepref_dbg_trans_step
apply sepref_dbg_trans_step
apply sepref_dbg_opt
apply sepref_dbg_cons_solve
apply sepref_dbg_cons_solve
apply sepref_dbg_constraints
done
end
subsubsection ‹Optimization Phase›
text ‹The optimization phase simplifies the generated
program, first with @{attribute sepref_opt_simps}, and
then with @{attribute sepref_opt_simps2}.
For simplification, the tag @{const CNV} is used, which is discharged
with @{thm CNV_I} after simplification.
Method @{method sepref_dbg_opt} gives direct access to this phase.
The simplification is used to beautify the generated code.
The most important simplifications collapse code that does not
depend on the heap to plain expressions (using the monad laws), and
apply certain deforestation optimizations.
Consider the following example:
›
sepref_thm opt_example is "λn. do { let r = fold (+) [1..<n] 0; RETURN (n*n+2) }"
:: "nat_assn⇧k →⇩a nat_assn"
apply sepref_dbg_preproc
apply sepref_dbg_cons_init
apply sepref_dbg_id
apply sepref_dbg_monadify
apply sepref_dbg_opt_init
apply sepref_dbg_trans
supply [[show_main_goal]]
apply sepref_dbg_opt
apply sepref_dbg_cons_solve
apply sepref_dbg_cons_solve
apply sepref_dbg_constraints
done
subsubsection ‹Cons-Solve Phases›
text ‹These two phases, accessible via @{method sepref_dbg_cons_solve},
applies the frame inference tool to solve the two implications generated
by the consequence rule phase.
›
subsubsection ‹Constraints Phase›
text ‹
This phase, accessible via @{method sepref_dbg_constraints}, solve the
deferred constraints that are left, and then removes the ‹CONSTRAINT_SLOT›
subgoal.
›
subsection ‹Refinement Rules›
text ‹
There are two forms of specifying refinement between an Imperative/HOL program
and an abstract program in the ‹nres›-monad.
The ‹hn_refine› form (also hnr-form) is the more low-level form.
The term @{term "P ⟹ hn_refine Γ c Γ' R a"} states that, under precondition ‹P›, for
a heap described by ‹Γ›, the Imperative/HOL program ‹c› produces a heap described by
‹Γ'› and the result is refined by ‹R›. Moreover, the abstract result is among the possible
results of the abstract program ‹a›.
This low-level form formally enforces no restrictions on its arguments, however, there are
some assumed by our tool:
▪ ‹Γ› must have the form ‹hn_ctxt A⇩1 x⇩1 xi⇩1 * … * hn_ctxt A⇩n x⇩n xi⇩n›
▪ ‹Γ'› must have the form ‹hn_ctxt B⇩1 x⇩1 xi⇩1 * … * hn_ctxt B⇩n x⇩n xi⇩n›
where either ‹B⇩i = A⇩i› or ‹B⇩i = invalid_assn A⇩i›. This means that each argument to
the program is either preserved or destroyed.
▪ ‹R› must not contain a ‹hn_ctxt› tag.
▪ ‹a› must be in protected form (@{term "($)"} and @{term "PROTECT2"} tags)
The high-level ‹hfref› form formally enforces these restrictions. Moreover,
it assumes ‹c› and ‹a› to be presented as functions from exactly one argument.
For constants or functions with more arguments, you may use @{term uncurry0}
and @{term uncurry}. (Also available @{term uncurry2} to @{term uncurry5}).
The general form is ‹PC ⟹ (uncurry⇩x f, uncurry⇩x g) ∈ [P]⇩a A⇩1⇧k⇧1 *⇩a … *⇩a A⇩n⇧k⇧n → R›,
where ‹ki› is ‹k› if the argument is preserved (kept) or ‹d› is it is destroyed.
‹PC› are preconditions of the rule that do not depend on the arguments, usually
restrictions on the relations. ‹P› is a predicate on the single argument of ‹g›,
representing the precondition that depends on the arguments.
Optionally, ‹g› may be of the form ‹RETURN o…o g'›, in which case the rule
applies to a plain function.
If there is no precondition, there is a shorter
syntax: @{term "Args→⇩aR ≡ [λ_. True]⇩a Args→R"}.
For example, consider @{thm [source] arl_swap_hnr[unfolded pre_list_swap_def]}.
It reads @{term "CONSTRAINT is_pure A ⟹
(uncurry2 arl_swap, uncurry2 (RETURN ∘∘∘ op_list_swap))
∈ [λ((l, i), j). i < length l ∧ j < length l]⇩a
(arl_assn A)⇧d *⇩a nat_assn⇧k *⇩a nat_assn⇧k → arl_assn A"}
We have three arguments, the list and two indexes. The refinement assertion ‹A›
for the list elements must be pure, and the indexes must be in range.
The original list is destroyed, the indexes are kept.
›
thm arl_swap_hnr[unfolded pre_list_swap_def, no_vars]
subsubsection ‹Converting between hfref and hnr form›
text ‹A subgoal in hfref form is converted to hnr form by
the preprocessing phase of Sepref (see there for a description).
Theorems with hnr/hfref conclusions can be converted
using @{attribute to_hfref}/@{attribute to_hnr}.
This conversion is automatically done for rules registered with
@{attribute sepref_fr_rules}, such that this attribute accepts both forms.
Conversion to hnr-form can be controlled by specifying
@{attribute to_hnr_post} unfold-rules, which are applied after the conversion.
Note: These currently contain hard-coded rules to handle ‹RETURN o…o _› for up
to six arguments. If you have more arguments, you need to add corresponding rules here,
until this issue is fixed and the tool can produce such rules automatically.
Similarly, @{attribute to_hfref_post} is applied after conversion to hfref form.
›
thm to_hnr_post
thm to_hfref_post
subsubsection ‹Importing Parametricity Theorems›
text ‹For pure refinements, it is sometimes simpler to specify a parametricity
theorem than a hnr/hfref theorem, in particular as there is a large number of
parametricity theorems readily available, in the parametricity component or Autoref,
and in the Lifting/Transfer tool.
Autoref uses a set-based notation for parametricity theorems
(e.g. @{term "((@),(@)) ∈ ⟨A⟩list_rel → ⟨A⟩list_rel → ⟨A⟩list_rel"}),
while lifting/transfer uses a predicate based notation (e.g.
@{term "rel_fun (list_all2 A) (rel_fun (list_all2 A) (list_all2 A)) (@) (@)"}).
Currently, we only support the Autoref style, but provide a few lemmas that
ease manual conversion from the Lifting/Transfer style.
Given a parametricity theorem, the attribute @{attribute sepref_param}
converts it to a hfref theorem, the attribute
@{attribute sepref_import_param} does the conversion and registers the result
as operator rule.
Relation variables are converted to assertion variables with an ‹is_pure› constraint.
The behaviour can be customized by @{attribute sepref_import_rewrite}, which
contains rewrite rules applied in the last but one step of the conversion, before
converting relation variables to assertion variables.
These theorems can be used to convert relations to there corresponding assertions,
e.g., @{thm list_assn_pure_conv[symmetric]} converts a list relation to a list
assertion.
For debugging purposes, the attribute @{attribute sepref_dbg_import_rl_only}
converts a parametricity theorem to a hnr-theorem. This is the first step of
the standard conversion, followed by a conversion to hfref form.
›
thm sepref_import_rewrite
thm param_append
thm param_append[sepref_param]
thm param_append[sepref_dbg_import_rl_only]
text ‹For re-using Lifting/Transfer style theorems, the constants
@{const p2rel} and @{const rel2p} may be helpful, however, there is no
automation available yet.
Usage examples can be found in, e.g., @{theory Refine_Imperative_HOL.IICF_Multiset}, where we
import parametricity lemmas for multisets from the Lifting/Transfer package.
›
thm p2rel
thm rel2p
subsection ‹Composition›
subsubsection ‹Fref-Rules›
text ‹
In standard parametricity theorems as described above, one cannot specify
preconditions for the parameters, e.g., @{term hd} is only parametric for
non-empty lists.
As of Isabelle2016, the Lifting/Transfer package cannot specify
such preconditions at all.
Autoref's parametricity tool can specify such preconditions by using first-order rules,
(cf. @{thm param_hd}). However, currently, @{attribute sepref_import_param} cannot handle
these first-order rules.
Instead, Sepref supports the fref-format for parametricity rules, which resembles the
hfref-format: Abstract and concrete objects are functions with exactly one parameter,
uncurried if necessary. Moreover, there is an explicit precondition.
The syntax is ‹(uncurry⇩x f, uncurry⇩x g) ∈ [P]⇩f (...(R⇩1×⇩rR⇩2)×⇩r...)×⇩rR⇩n) → R›,
and without precondition, we have ‹(...(R⇩1×⇩rR⇩2)×⇩r...)×⇩rR⇩n) →⇩f R›.
Note the left-bracketing of the tuples, which is non-standard in Isabelle.
As we currently have no syntax for a left-associative product relation, we
use the right-associative syntax @{term "(×⇩r)"} and explicit brackets.
The attribute @{attribute to_fref} can convert (higher-order form) parametricity
theorems to the fref-form.
›
subsubsection ‹Composition of hfref and fref theorems›
text ‹
fref and hfref theorems can be composed, if the
abstract function or the first theorem equals the concrete function of the
second theorem. Currently, we can compose an hfref with an fref theorem,
yielding a hfref theorem, and two fref-theorems, yielding an fref theorem.
As we do not support refinement of heap-programs, but only refinement ∗‹into› heap
programs, we cannot compose two hfref theorems.
The attribute @{attribute FCOMP} does these compositions and normalizes the result.
Normalization consists of precondition simplification, and distributing composition
over products, such that composition can be done argument-wise.
For this, we unfold with @{attribute fcomp_norm_unfold}, and then simplify with
@{attribute fcomp_norm_simps}.
The ‹FCOMP› attribute tries to convert its arguments to hfref/fref form, such that
it also accepts hnr-rules and parametricity rules.
The standard use-case for ‹FCOMP› is to compose multiple refinement steps to
get the final correctness theorem. Examples for this are in the quickstart guide.
Another use-case for ‹FCOMP› is to compose a refinement theorem of a
container operation, that refines the elements by identity, with a parametricity theorem
for the container operation, that adds a (pure) refinement of the elements.
In practice, the high-level utilities @{command sepref_decl_op} and
@{command sepref_decl_impl} are used for this purpose. Internally, they use ‹FCOMP›.
›
thm fcomp_norm_unfold
thm fcomp_norm_simps
thm array_get_hnr_aux
thm "op_list_get.fref"
thm array_get_hnr_aux[FCOMP op_list_get.fref]
context
notes [fcomp_norm_unfold] = array_assn_def[symmetric]
begin
thm array_get_hnr_aux[FCOMP op_list_get.fref]
end
subsection ‹Registration of Interface Types›
text ‹
An interface type represents some conceptual type, which is encoded to a
more complex type in HOL. For example, the interface type @{typ "('k,'v)i_map"}
represents maps, which are encoded as @{typ "'k ⇒ 'v option"} in HOL.
New interface types must be registered by the command @{command sepref_decl_intf}.
›
sepref_decl_intf ('a,'b) i_my_intf is "'a*'a ⇒ 'b option"
sepref_decl_intf ('a,'b) i_my_intf2 (infix "*→⇩i" 0) is "'a*'a ⇒ 'b option"
subsection ‹Registration of Abstract Operations›
text ‹
Registering a new abstract operation requires some amount of setup,
which is automated by the ‹sepref_register› tool. Currently, it only
works for operations, not for combinators.
The @{command sepref_register} command takes a list of terms and registers
them as operators. Optionally, each term can have an interface type annotation.
If there is no interface type annotation, the interface type is derived from the
terms HOL type, which is rewritten by the theorems from @{attribute map_type_eqs}.
This rewriting is useful for bulk-setup of many constants with conceptual types
different from there HOL-types.
Note that the interface type must correspond to the HOL type of the registered term,
otherwise, you'll get an error message.
If the term is not a single constant or variable, and does not already start
with a @{const PR_CONST} tag, such a tag will be added, and also a pattern rule
will be registered to add the tag on operator identification.
If the term has a monadic result type (@{typ "_ nres"}), also an
arity and combinator rule for the monadify phase are generated.
There is also an attribute version @{attribute "sepref_register_adhoc"}.
It has the same syntax, and generates the same theorems, but does not give
names to the theorems. It's main application is to conveniently register fixed
variables of a context. Warning: Make sure not to export such an attribute from
the context, as it may become meaningless outside the context, or worse, confuse
the tool.
›
text ‹Example for bulk-registration, utilizing type-rewriting›
definition "map_op1 m n ≡ m(n↦n+1)"
definition "map_op2 m n ≡ m(n↦n+2)"
definition "map_op3 m n ≡ m(n↦n+3)"
definition "map_op_to_map (m::'a⇀'b) ≡ m"
context
notes [map_type_eqs] = map_type_eqI[of "TYPE('a⇀'b)" "TYPE(('a,'b)i_map)"]
begin
sepref_register map_op1 map_op2 map_op3
sepref_register map_op_to_map :: "('a⇀'b) ⇒ ('a,'b) i_map"
end
text ‹Example for insertion of ‹PR_CONST› tag and attribute-version›
context
fixes N :: nat and D :: int
notes [[sepref_register_adhoc N D]]
notes [sepref_import_param] = IdI[of N] IdI[of D]
begin
definition "newlist ≡ replicate N D"
sepref_register newlist
print_theorems
sepref_register other_basename_newlist: newlist
print_theorems
sepref_register yet_another_basename_newlist: "PR_CONST newlist"
print_theorems
end
text ‹Example for mcomb/arity theorems›
definition "select_a_one l ≡ SPEC (λi. i<length l ∧ l!i = (1::nat))"
sepref_register "select_a_one"
print_theorems
text ‹
The following command fails, as the specified interface type does not
correspond to the HOL type of the term:
@{theory_text ‹sepref_register hd :: "(nat,nat) i_map"›}
›
subsection ‹High-Level tools for Interface/Implementation Declaration›
text ‹
The Imperative Isabelle Collections Framework (IICF), which comes with Sepref,
has a concept of interfaces, which specify a set of abstract operations for
a conceptual type, and implementations, which implement these operations.
Each operation may have a natural precondition, which is established already
for the abstract operation. Many operations come in a plain version, and a
monadic version which asserts the precondition. Implementations may
strengthen the precondition with implementation specific preconditions.
Moreover, each operation comes with a parametricity lemma.
When registering an implementation, the refinement of the implementation is
combined with the parametricity lemma to allow for (pure) refinements of the
element types.
@{rail ‹@@{command sepref_decl_op} ('(' @{text flags} ')')? ⏎
(@{text name} @':')? @{text term} @'::' @{text term} ⏎
(@'where' @{text props})? ›}
The command @{command sepref_decl_op} declares an abstract operation.
It takes a term defining the operation, and a parametricity relation.
It generates the monadic version from the plain version, defines constants
for the operations, registers them, and tries to prove parametricity lemmas
automatically. Parametricity must be proved for the operation, and for the
precondition. If the automatic parametricity proofs fail, the user gets
presented goals that can be proven manually.
Optionally, a basename for the operation can be specified. If none is specified,
a heuristics tries to derive one from the specified term.
A list of properties (separated by space and/or ‹and›) can be specified,
which get constraint-preconditions of the relation.
Finally, the following flags can be specified. Each flag can be prefixed by ‹no_›
to invert its meaning:
➧[mop] (default: true) Generate monadic version of operation
➧[ismop] (default: false) Indicate that given term is the monadic version
➧[rawgoals] (default: false) Present raw goals to user, without attempting to prove them
➧[def] (default: true) Define a constant for the specified term. Otherwise, use the specified term literally.
›
text ‹
@{rail ‹@@{command sepref_decl_impl} ('(' @{text flags} ')')? ⏎
(@{text name} @':')? (@'[' @{text term} @']')? ⏎
@{text thm} (@'uses' @{text thm})?
›}
The @{command sepref_decl_impl} command declares an implementation of an interface operation.
It takes a refinement theorem for the implementation, and combines it with the corresponding
parametricity theorem. After ‹uses›, one can override the parametricity theorem to be used.
A heuristics is used to merge the preconditions of the refinement and parametricity theorem.
This heuristics can be overridden by specifiying the desired precondition inside ‹[…]›.
Finally, the user gets presented remaining subgoals that cannot be solved by the heuristics.
The command accepts the following flags:
➧[mop] (default: true) Generate implementation for monadic version
➧[ismop] (default: false) Declare that the given theorems refer to the monadic version
➧[transfer] (default: true) Try to automatically transfer the implementation's precondition
over the argument relation from the parametricity theorem.
➧[rawgoals] (default: false) Do not attempt to solve or simplify the goals
➧[register] (default: true) Register the generated theorems as operation rules.
›
subsection ‹Defining synthesized Constants›
text ‹
The @{command sepref_definition} allows one to specify a name, an abstract term and
a desired refinement relation in hfref-form. It then sets up a goal that can be
massaged (usually, constants are unfolded and annotations/implementation specific
operations are added) and then solved by @{method sepref}.
After the goal is solved, the command extracts the synthesized term and defines it as
a constant with the specified name. Moreover, it sets up code equations for the constant,
correctly handling recursion combinators. Extraction of code equations is controlled by the
‹prep_code› flag. Examples for this command can be found in the quickstart guide.
›
end
Theory Sepref_Guide_General_Util
section ‹General Purpose Utilities›
theory Sepref_Guide_General_Util
imports "../IICF/IICF"
begin
text ‹This userguide documents some of the general purpose utilities that
come with the Sepref tool, but are useful in other contexts, too.›
subsection ‹Methods›
subsubsection ‹Resolve with Premises›
text ‹The @{method rprems} resolves the current subgoal with
one of its premises. It returns a sequence of possible resolvents.
Optionally, the number of the premise to resolve with can be specified.
›
subsubsection ‹First-Order Resolution›
text ‹The @{method fo_rule} applies a rule with first-order matching.
It is very useful to be used with theorems like @{thm arg_cong}.›
notepad begin
have "card {x. 3<x ∧ x<(7::nat)} = card {x. 4≤x ∧ x≤(6::nat)}"
apply (fo_rule arg_cong)
apply auto
done
fix f :: "nat set ⇒ nat set ⇒ bool"
have "⋀a. f {x. x*2 + a + 3 < 10} {x. 3<x ∧ x<(7::nat)} = f {x. x*2 + a ≤6} {x. 4≤x ∧ x≤(6::nat)}"
apply (fo_rule arg_cong fun_cong cong)+
apply auto
done
end
subsubsection ‹Clarsimp all goals›
text ‹@{method clarsimp_all} is a ‹clarsimp› on all goals.
It takes the same arguments as ‹clarsimp›.›
subsubsection ‹VCG solver›
text ‹@{method vc_solve} clarsimps all subgoals.
Then, it tries to apply a rule specified in the ‹solve: › argument,
and tries to solve the result by ‹auto›. If the goal cannot be solved this way,
it is not changed.
This method is handy to be applied after verification condition generation.
If ‹auto› shall be tried on all subgoals, specify ‹solve: asm_rl›.
›
subsection ‹Structured Apply Scripts (experimental)›
text ‹A few variants of the apply command, that document the
subgoal structure of a proof. They are a lightweight alternative to
@{command subgoal}, and fully support schematic variables.
@{command applyS} applies a method to the current subgoal, and fails if the
subgoal is not solved.
@{command apply1} applies a method to the current subgoal, and fails if
the goal is solved or additional goals are created.
@{command focus} selects the current subgoal, and optionally applies a method.
@{command applyF} selects the current subgoal and applies a method.
@{command solved} enforces no subgoals to be left in the current selection, and unselects.
Note: The selection/unselection mechanism is a primitive version of focusing
on a subgoal, realized by inserting protect-tags into the goal-state.
›
subsection ‹Extracting Definitions from Theorems›
text ‹
The @{command concrete_definition} can be used to extract parts of a theorem
as a constant. It is documented at the place where it is defined
(ctrl-click to jump there).
›
end
Theory Sepref_ICF_Bindings
theory Sepref_ICF_Bindings
imports Sepref_Tool
Collections.Refine_Dflt_ICF
"IICF/IICF"
begin
subsection ‹Miscellaneous›
lemma (in -) rev_append_hnr[param,sepref_import_param]:
"(rev_append, rev_append) ∈ ⟨A⟩list_rel → ⟨A⟩list_rel → ⟨A⟩list_rel"
unfolding rev_append_def by parametricity
subsection ‹Sets by List›
lemma lsr_finite[simp, intro]: "(l,s)∈⟨R⟩list_set_rel ⟹ finite s"
by (auto simp: list_set_rel_def br_def)
lemma it_to_sorted_list_triv:
assumes "distinct l"
shows "RETURN l ≤ it_to_sorted_list (λ_ _. True) (set l)"
using assms unfolding it_to_sorted_list_def
by refine_vcg auto
lemma [sepref_gen_algo_rules]: "GEN_ALGO (return) (IS_TO_SORTED_LIST (λ_ _. True) (pure (⟨A⟩list_set_rel)) (pure A))"
unfolding GEN_ALGO_def IS_TO_SORTED_LIST_def
apply (simp add: list_assn_pure_conv)
apply rule
apply rule
apply (sep_auto simp: pure_def intro: it_to_sorted_list_triv simp: list_set_rel_def br_def)
done
lemma list_set_rel_compp:
assumes "IS_LEFT_UNIQUE A" "IS_RIGHT_UNIQUE A"
shows "⟨Id⟩list_set_rel O ⟨A⟩set_rel = ⟨A⟩list_set_rel"
unfolding list_set_rel_def
proof (safe; clarsimp simp: in_br_conv)
fix x z
assume "(set x,z)∈⟨A⟩set_rel" "distinct x"
from obtain_list_from_setrel[OF ‹IS_RIGHT_UNIQUE A› this(1)] obtain zl where
[simp]: "z = set zl" and X_ZL: "(x, zl) ∈ ⟨A⟩list_rel" .
have "distinct zl"
using param_distinct[OF assms, THEN fun_relD, OF X_ZL] ‹distinct x›
by auto
show "(x,z) ∈ ⟨A⟩list_rel O br set distinct"
apply (rule relcompI[OF X_ZL])
by (auto simp: in_br_conv ‹distinct zl›)
next
fix x y
assume XY: "(x, y) ∈ ⟨A⟩list_rel" and "distinct y"
have "distinct x"
using param_distinct[OF assms, THEN fun_relD, OF XY] ‹distinct y›
by auto
show "(x, set y) ∈ br set distinct O ⟨A⟩set_rel"
apply (rule relcompI[where b="set x"])
subgoal by (auto simp: in_br_conv ‹distinct x›)
subgoal by (rule param_set[OF ‹IS_RIGHT_UNIQUE A›, THEN fun_relD, OF XY])
done
qed
lemma GEN_OP_EQ_Id: "GEN_OP (=) (=) (Id→Id→bool_rel)" by simp
hide_const (open) Intf_Set.op_set_isEmpty Intf_Set.op_set_delete
lemma autoref_import_set_unfolds:
"{} = op_set_empty"
"uncurry (RETURN oo (∈)) = uncurry (RETURN oo op_set_member)"
"Intf_Set.op_set_isEmpty = IICF_Set.op_set_is_empty"
"Intf_Set.op_set_delete = IICF_Set.op_set_delete"
"insert = IICF_Set.op_set_insert"
by (auto intro!: ext)
context fixes A :: "'a ⇒ 'ai ⇒ assn" begin
private lemma APA: "⟦PROP Q; CONSTRAINT is_pure A⟧ ⟹ PROP Q" .
private lemma APAru: "⟦PROP Q; CONSTRAINT (IS_PURE IS_RIGHT_UNIQUE) A⟧ ⟹ PROP Q" .
private lemma APAlu: "⟦PROP Q; CONSTRAINT (IS_PURE IS_LEFT_UNIQUE) A⟧ ⟹ PROP Q" .
private lemma APAbu: "⟦PROP Q; CONSTRAINT (IS_PURE IS_LEFT_UNIQUE) A; CONSTRAINT (IS_PURE IS_RIGHT_UNIQUE) A⟧ ⟹ PROP Q" .
definition "list_set_assn = pure (⟨Id⟩list_set_rel O ⟨the_pure A⟩set_rel)"
context
notes [fcomp_norm_unfold] = list_set_assn_def[symmetric]
notes [simp] = IS_LEFT_UNIQUE_def
begin
lemmas hnr_op_ls_empty = list_set_autoref_empty[of Id, sepref_param, unfolded autoref_import_set_unfolds,
FCOMP op_set_empty.fref[of "the_pure A"]]
lemmas hnr_mop_ls_empty = hnr_op_ls_empty[FCOMP mk_mop_rl0_np[OF mop_set_empty_alt]]
definition [simp]: "op_ls_empty = op_set_empty"
sepref_register op_ls_empty
lemmas hnr_op_ls_is_empty[sepref_fr_rules] = list_set_autoref_isEmpty[of Id, sepref_param, THEN APA, unfolded autoref_import_set_unfolds,
FCOMP op_set_is_empty.fref[of "the_pure A"]]
lemmas hnr_mop_ls_is_empty[sepref_fr_rules] = hnr_op_ls_is_empty[FCOMP mk_mop_rl1_np[OF mop_set_is_empty_alt]]
lemmas hnr_op_ls_member[sepref_fr_rules] = list_set_autoref_member[OF GEN_OP_EQ_Id, sepref_param, THEN APAlu, unfolded autoref_import_set_unfolds,
FCOMP op_set_member.fref[of "the_pure A"]]
lemmas hnr_mop_ls_member[sepref_fr_rules] = hnr_op_ls_member[FCOMP mk_mop_rl2_np[OF mop_set_member_alt]]
lemmas hnr_op_ls_insert[sepref_fr_rules] = list_set_autoref_insert[OF GEN_OP_EQ_Id, sepref_param, THEN APAru, unfolded autoref_import_set_unfolds,
FCOMP op_set_insert.fref[of "the_pure A"]]
lemmas hnr_mop_ls_insert[sepref_fr_rules] = hnr_op_ls_insert[FCOMP mk_mop_rl2_np[OF mop_set_insert_alt]]
lemmas hnr_op_ls_delete[sepref_fr_rules] = list_set_autoref_delete[OF GEN_OP_EQ_Id, sepref_param, THEN APAbu, unfolded autoref_import_set_unfolds,
FCOMP op_set_delete.fref[of "the_pure A"]]
lemmas hnr_mop_ls_delete[sepref_fr_rules] = hnr_op_ls_delete[FCOMP mk_mop_rl2_np[OF mop_set_delete_alt]]
text ‹Adapting this optimization from Autoref. ›
sepref_decl_op set_insert_dj: "insert" :: "[λ(x,s). x∉s]⇩f K ×⇩r ⟨K⟩set_rel → ⟨K⟩set_rel"
where "IS_RIGHT_UNIQUE K" "IS_LEFT_UNIQUE K" .
lemma fold_set_insert_dj: "Set.insert = op_set_insert_dj" by simp
lemma ls_insert_dj_hnr_aux:
"(uncurry (return oo Cons), uncurry mop_set_insert_dj) ∈ (pure Id)⇧k *⇩a (pure (⟨Id⟩list_set_rel))⇧k →⇩a pure (⟨Id⟩list_set_rel)"
using list_set_autoref_insert_dj[where R=Id,param_fo]
apply (sep_auto intro!: hfrefI hn_refineI simp: pure_def refine_pw_simps eintros del: exI)
apply force
done
lemmas ls_insert_dj_hnr[sepref_fr_rules] = ls_insert_dj_hnr_aux[THEN APAbu, FCOMP mop_set_insert_dj.fref[of "the_pure A"]]
lemmas ls_insert_dj_hnr_mop[sepref_fr_rules]
= ls_insert_dj_hnr[FCOMP mk_op_rl2[OF mop_set_insert_dj_alt]]
private lemma hd_in_set_conv: "hd l ∈ set l ⟷ l≠[]" by auto
lemma ls_pick_hnr_aux: "(return o hd, mop_set_pick) ∈ (pure (⟨Id⟩list_set_rel))⇧k →⇩a id_assn"
apply (sep_auto
intro!: hfrefI hn_refineI
simp: pure_def IS_PURE_def IS_ID_def list_set_rel_def refine_pw_simps
eintros del: exI)
apply (auto simp: in_br_conv hd_in_set_conv)
done
lemmas ls_pick_hnr[sepref_fr_rules] = ls_pick_hnr_aux[THEN APA,FCOMP mop_set_pick.fref[of "the_pure A"]]
lemma ls_pick_hnr_mop[sepref_fr_rules]: "CONSTRAINT is_pure A ⟹ (return ∘ hd, op_set_pick) ∈ [λs. s≠{}]⇩a list_set_assn⇧k → A"
using ls_pick_hnr
by (simp add: hfref_to_ASSERT_conv mop_set_pick_alt[abs_def])
end
end
interpretation ls: set_custom_empty "return []" op_ls_empty
by unfold_locales simp
lemmas [sepref_fr_rules] = hnr_op_ls_empty[folded op_ls_empty_def]
end
Theory Sepref_WGraph
section ‹Imperative Weighted Graphs›
theory Sepref_WGraph
imports
"../Sepref_ICF_Bindings"
Dijkstra_Shortest_Path.Graph
begin
type_synonym 'w graph_impl = "(('w×nat) list) Heap.array"
abbreviation (input) "node_rel ≡ nbn_rel"
abbreviation (input) "node_assn ≡ nbn_assn"
definition "is_graph n R G Gi ≡
∃⇩Al. Gi ↦⇩a l * ↑(
valid_graph G ∧
n = length l ∧
nodes G = {0..<length l} ∧
(∀v∈nodes G. (l!v, succ G v) ∈ ⟨R ×⇩r node_rel (length l)⟩list_set_rel)
)"
definition succi :: "'w::heap graph_impl ⇒ nat ⇒ ('w×nat) list Heap"
where "succi G v = do {
l ← Array.len G;
if v<l then do {
r ← Array.nth G v;
return r
} else return []
}"
lemma "
< is_graph n R G Gi * ↑(v∈nodes G) >
succi Gi v
< λr. is_graph n R G Gi * ↑((r,succ G v)∈⟨R ×⇩r node_rel n⟩list_set_rel) >"
unfolding is_graph_def succi_def
by sep_auto
lemma (in valid_graph) succ_no_node_empty: "v∉V ⟹ succ G v = {}"
unfolding succ_def using E_valid
by auto
lemma [sepref_fr_rules]: "
hn_refine
(hn_ctxt (is_graph n R) G Gi * hn_ctxt (node_assn n) v vi)
(succi Gi vi)
(hn_ctxt (is_graph n R) G Gi * hn_ctxt (node_assn n) v vi)
(pure (⟨R ×⇩r node_rel n⟩list_set_rel))
(RETURN$(succ$G$v))"
apply rule
unfolding hn_ctxt_def pure_def is_graph_def succi_def
by (sep_auto simp: valid_graph.succ_no_node_empty list_set_autoref_empty)
definition nodes_impl :: "'w::heap graph_impl ⇒ nat list Heap"
where "nodes_impl Gi ≡ do {
l ← Array.len Gi;
return [0..<l]
}"
lemma node_list_rel_id: "∀x∈set l. x<n ⟹ (l,l)∈⟨node_rel n⟩list_rel"
by (induction l) auto
lemma [sepref_fr_rules]: "hn_refine
(hn_ctxt (is_graph n R) G Gi)
(nodes_impl Gi)
(hn_ctxt (is_graph n R) G Gi)
(pure (⟨node_rel n⟩list_set_rel))
(RETURN$(nodes$G))"
apply rule
unfolding hn_ctxt_def pure_def is_graph_def nodes_impl_def
apply (sep_auto simp: list_set_rel_def br_def intro!: relcompI node_list_rel_id)
done
sepref_register succ nodes
definition cr_graph
:: "nat ⇒ (nat × nat × 'w) list ⇒ 'w::heap graph_impl Heap"
where
"cr_graph numV Es ≡ do {
a ← Array.new numV [];
a ← imp_nfoldli Es (λ_. return True) (λ(u,v,w) a. do {
l ← Array.nth a u;
let l = (w,v)#l;
a ← Array.upd u l a;
return a
}) a;
return a
}"
export_code cr_graph checking SML_imp
end
Theory Sepref_Chapter_Examples
chapter ‹Examples›
text ‹This chapter contains practical examples of using the IRF and IICF.
Moreover it contains some snippets that illustrate how to solve common tasks
like setting up custom datatypes or higher-order combinators.
›
theory Sepref_Chapter_Examples
imports Main
begin
end
Theory Sepref_Graph
section ‹Imperative Graph Representation›
theory Sepref_Graph
imports
"../Sepref"
"../Sepref_ICF_Bindings"
"../IICF/IICF"
begin
text ‹Graph Interface›
sepref_decl_intf 'a i_graph is "('a×'a) set"
definition op_graph_succ :: "('v×'v) set ⇒ 'v ⇒ 'v set"
where [simp]: "op_graph_succ E u ≡ E``{u}"
sepref_register op_graph_succ :: "'a i_graph ⇒ 'a ⇒ 'a set"
thm intf_of_assnI
lemma [pat_rules]: "((``))$E$(insert$u${}) ≡ op_graph_succ$E$u" by simp
definition [to_relAPP]: "graph_rel A ≡ ⟨A×⇩rA⟩set_rel"
text ‹Adjacency List Implementation›
lemma param_op_graph_succ[param]:
"⟦IS_LEFT_UNIQUE A; IS_RIGHT_UNIQUE A⟧ ⟹ (op_graph_succ, op_graph_succ) ∈ ⟨A⟩graph_rel → A → ⟨A⟩set_rel"
unfolding op_graph_succ_def[abs_def] graph_rel_def
by parametricity
context begin
private definition "graph_α1 l ≡ { (i,j). i<length l ∧ j∈l!i } "
private definition "graph_rel1 ≡ br graph_α1 (λ_. True)"
private definition "succ1 l i ≡ if i<length l then l!i else {}"
private lemma succ1_refine: "(succ1,op_graph_succ) ∈ graph_rel1 → Id → ⟨Id⟩set_rel"
by (auto simp: graph_rel1_def graph_α1_def br_def succ1_def split: if_split_asm intro!: ext)
private definition "assn2 ≡ array_assn (pure (⟨Id⟩list_set_rel))"
definition "adjg_assn A ≡ hr_comp (hr_comp assn2 graph_rel1) (⟨the_pure A⟩graph_rel)"
context
notes [sepref_import_param] = list_set_autoref_empty[folded op_set_empty_def]
notes [fcomp_norm_unfold] = adjg_assn_def[symmetric]
begin
sepref_definition succ2 is "(uncurry (RETURN oo succ1))" :: "(assn2⇧k*⇩aid_assn⇧k →⇩a pure (⟨Id⟩list_set_rel))"
unfolding succ1_def[abs_def] assn2_def
by sepref
lemma adjg_succ_hnr[sepref_fr_rules]: "⟦CONSTRAINT (IS_PURE IS_LEFT_UNIQUE) A; CONSTRAINT (IS_PURE IS_RIGHT_UNIQUE) A⟧
⟹ (uncurry succ2, uncurry (RETURN ∘∘ op_graph_succ)) ∈ (adjg_assn A)⇧k *⇩a A⇧k →⇩a pure (⟨the_pure A⟩list_set_rel)"
using succ2.refine[FCOMP succ1_refine, FCOMP param_op_graph_succ, simplified, of A]
by (simp add: IS_PURE_def list_set_rel_compp)
end
end
lemma [intf_of_assn]:
"intf_of_assn A (i::'I itself) ⟹ intf_of_assn (adjg_assn A) TYPE('I i_graph)" by simp
definition cr_graph
:: "nat ⇒ (nat × nat) list ⇒ nat list Heap.array Heap"
where
"cr_graph numV Es ≡ do {
a ← Array.new numV [];
a ← imp_nfoldli Es (λ_. return True) (λ(u,v) a. do {
l ← Array.nth a u;
let l = v#l;
a ← Array.upd u l a;
return a
}) a;
return a
}"
export_code cr_graph checking SML_imp
end
Theory Sepref_DFS
section ‹Simple DFS Algorithm›
theory Sepref_DFS
imports
"../Sepref"
Sepref_Graph
begin
text ‹
We define a simple DFS-algorithm, prove a simple correctness
property, and do data refinement to an efficient implementation.
›
subsection ‹Definition›
text ‹Recursive DFS-Algorithm.
‹E› is the edge relation of the graph, ‹vd› the node to
search for, and ‹v0› the start node.
Already explored nodes are stored in ‹V›.›
context
fixes E :: "'v rel" and v0 :: 'v and tgt :: "'v ⇒ bool"
begin
definition dfs :: "('v set × bool) nres" where
"dfs ≡ do {
(V,r) ← RECT (λdfs (V,v).
if v∈V then RETURN (V,False)
else do {
let V=insert v V;
if tgt v then
RETURN (V,True)
else
FOREACH⇩C (E``{v}) (λ(_,b). ¬b) (λv' (V,_). dfs (V,v')) (V,False)
}
) ({},v0);
RETURN (V,r)
}"
definition "reachable ≡ {v. (v0,v)∈E⇧*}"
definition "dfs_spec ≡ SPEC (λ(V,r). (r ⟷ reachable∩Collect tgt≠{}) ∧ (¬r ⟶ V=reachable))"
lemma dfs_correct:
assumes fr: "finite reachable"
shows "dfs ≤ dfs_spec"
proof -
have F: "⋀v. v∈reachable ⟹ finite (E``{v})"
using fr
apply (auto simp: reachable_def)
by (metis (mono_tags) Image_singleton Image_singleton_iff
finite_subset rtrancl.rtrancl_into_rtrancl subsetI)
define rpre where "rpre = (λS (V,v).
v∈reachable
∧ V⊆reachable
∧ S⊆V
∧ (V ∩ Collect tgt = {})
∧ E``(V-S) ⊆ V)"
define rpost where "rpost = (λS (V,v) (V',r).
(r⟷V'∩Collect tgt ≠ {})
∧ V⊆V'
∧ v∈V'
∧ V'⊆reachable
∧ (¬r ⟶ (E``(V'-S) ⊆ V')))
"
define fe_inv where "fe_inv = (λS V v it (V',r).
(r⟷V'∩Collect tgt ≠ {})
∧ insert v V⊆V'
∧ E``{v} - it ⊆ V'
∧ V'⊆reachable
∧ S⊆insert v V
∧ (¬r ⟶ E``(V'-S) ⊆ V' ∪ it ∧ E``(V'-insert v S) ⊆ V'))"
have vc_pre_initial: "rpre {} ({}, v0)"
by (auto simp: rpre_def reachable_def)
{
fix S V v
assume "rpre S (V,v)"
and "v∈V"
hence "rpost S (V,v) (V,False)"
unfolding rpre_def rpost_def
by auto
} note vc_node_visited = this
{
fix S V v
assume "tgt v"
and "rpre S (V,v)"
hence "rpost S (V,v) (insert v V, True)"
unfolding rpre_def rpost_def
by auto
} note vc_node_found = this
{
fix S V v
assume "rpre S (V, v)"
hence "finite (E``{v})"
unfolding rpre_def using F by (auto)
} note vc_foreach_finite = this
{
fix S V v
assume A: "v ∉ V" "¬tgt v"
and PRE: "rpre S (V, v)"
hence "fe_inv S V v (E``{v}) (insert v V, False)"
unfolding fe_inv_def rpre_def by (auto)
} note vc_enter_foreach = this
{
fix S V v v' it V'
assume A: "v ∉ V" "¬tgt v" "v' ∈ it" "it ⊆ E``{v}"
and FEI: "fe_inv S V v it (V', False)"
and PRE: "rpre S (V, v)"
from A have "v' ∈ E``{v}" by auto
moreover from PRE have "v ∈ reachable" by (auto simp: rpre_def)
hence "E``{v} ⊆ reachable" by (auto simp: reachable_def)
ultimately have [simp]: "v'∈reachable" by blast
have "rpre (insert v S) (V', v')"
unfolding rpre_def
using FEI PRE by (auto simp: fe_inv_def rpre_def) []
} note vc_rec_pre = this
{
fix S V V' v v' it Vr''
assume "fe_inv S V v it (V', False)"
and "rpost (insert v S) (V', v') Vr''"
hence "fe_inv S V v (it - {v'}) Vr''"
unfolding rpre_def rpost_def fe_inv_def
by clarsimp blast
} note vc_iterate_foreach = this
{
fix S V v V'
assume PRE: "rpre S (V, v)"
assume A: "v ∉ V" "¬tgt v"
assume FEI: "fe_inv S V v {} (V', False)"
have "rpost S (V, v) (V', False)"
unfolding rpost_def
using FEI by (auto simp: fe_inv_def) []
} note vc_foreach_completed_imp_post = this
{
fix S V v V' it
assume PRE: "rpre S (V, v)"
and A: "v ∉ V" "¬tgt v" "it ⊆ E``{v}"
and FEI: "fe_inv S V v it (V', True)"
hence "rpost S (V, v) (V', True)"
by (auto simp add: rpre_def rpost_def fe_inv_def) []
} note vc_foreach_interrupted_imp_post = this
{
fix V r
assume "rpost {} ({}, v0) (V, r)"
hence "(r ⟷ reachable ∩ Collect tgt ≠ {}) ∧ (¬r⟶V=reachable)"
by (auto
simp: rpost_def reachable_def
dest: Image_closed_trancl
intro: rev_ImageI)
} note vc_rpost_imp_spec = this
show ?thesis
unfolding dfs_def dfs_spec_def
apply (refine_rcg refine_vcg)
apply (rule order_trans)
apply(rule RECT_rule_arb[where
pre=rpre
and M="λa x. SPEC (rpost a x)"
and V="finite_psupset reachable <*lex*> {}"
])
apply refine_mono
apply (blast intro: fr)
apply (rule vc_pre_initial)
apply (refine_rcg refine_vcg
FOREACHc_rule'[where I="fe_inv S v s" for S v s]
)
apply (simp_all add: vc_node_visited vc_node_found)
apply (simp add: vc_foreach_finite)
apply (auto intro: vc_enter_foreach) []
apply (rule order_trans)
apply (rprems)
apply (erule (5) vc_rec_pre)
apply (auto simp add: fe_inv_def finite_psupset_def) []
apply (refine_rcg refine_vcg)
apply (simp add: vc_iterate_foreach)
apply (auto simp add: vc_foreach_completed_imp_post) []
apply (auto simp add: vc_foreach_interrupted_imp_post) []
apply (auto dest: vc_rpost_imp_spec) []
done
qed
end
lemma dfs_correct': "(uncurry2 dfs, uncurry2 dfs_spec)
∈ [λ((E,s),t). finite (reachable E s)]⇩f ((Id×⇩rId)×⇩rId) → ⟨Id⟩nres_rel"
apply (intro frefI nres_relI; clarsimp)
by (rule dfs_correct)
subsection ‹Refinement to Imperative/HOL›
text ‹We set up a schematic proof goal,
and use the sepref-tool to synthesize the implementation.
›
sepref_definition dfs_impl is
"uncurry2 dfs" :: "(adjg_assn nat_assn)⇧k*⇩anat_assn⇧k*⇩a(pure (nat_rel → bool_rel))⇧k →⇩a prod_assn (ias.assn nat_assn) bool_assn"
unfolding dfs_def[abs_def]
using [[goals_limit = 1]]
apply (rewrite in "RECT _ (⌑,_)" ias.fold_custom_empty)
apply (rewrite in "if ⌑ then RETURN (_,True) else _" fold_pho_apply)
apply sepref
done
export_code dfs_impl checking SML_imp
export_code dfs_impl in Haskell module_name DFS
text ‹Finally, correctness is shown by combining the
generated refinement theorem with the abstract correctness theorem.›
lemmas dfs_impl_correct' = dfs_impl.refine[FCOMP dfs_correct']
corollary dfs_impl_correct:
"finite (reachable E s) ⟹
<adjg_assn nat_assn E Ei>
dfs_impl Ei s tgt
< λ(Vi,r). ∃⇩AV. adjg_assn nat_assn E Ei * ias.assn nat_assn V Vi * ↑((r ⟷ reachable E s ∩ Collect tgt ≠ {}) ∧ (¬r ⟶ V=reachable E s) ) >⇩t"
using dfs_impl_correct'[THEN hfrefD, THEN hn_refineD, of "((E,s),tgt)" "((Ei,s),tgt)", simplified]
apply (rule cons_rule[rotated -1])
apply (sep_auto intro!: ent_ex_preI simp: dfs_spec_def pure_def)+
done
end
Theory Sepref_Dijkstra
section ‹Imperative Implementation of Dijkstra's Shortest Paths Algorithm›
theory Sepref_Dijkstra
imports
"../IICF/IICF"
"../Sepref_ICF_Bindings"
Dijkstra_Shortest_Path.Dijkstra
Dijkstra_Shortest_Path.Test
"HOL-Library.Code_Target_Numeral"
Sepref_WGraph
begin
instantiation infty :: (heap) heap
begin
instance
apply standard
apply (rule_tac x="λInfty ⇒ 0 | Num a ⇒ to_nat a + 1" in exI)
apply (rule injI)
apply (auto split: infty.splits)
done
end
fun infty_assn where
"infty_assn A (Num x) (Num y) = A x y"
| "infty_assn A Infty Infty = emp"
| "infty_assn _ _ _ = false"
text ‹Connection with ‹infty_rel››
lemma infty_assn_pure_conv: "infty_assn (pure A) = pure (⟨A⟩infty_rel)"
apply (intro ext)
subgoal for x y by (cases x; cases y; simp add: pure_def)
done
lemmas [sepref_import_rewrite, fcomp_norm_unfold, sepref_frame_normrel_eqs] =
infty_assn_pure_conv[symmetric]
lemmas [constraint_simps] = infty_assn_pure_conv
lemma infty_assn_pure[safe_constraint_rules]: "is_pure A ⟹ is_pure (infty_assn A)"
by (auto simp: is_pure_conv infty_assn_pure_conv)
lemma infty_assn_id[simp]: "infty_assn id_assn = id_assn"
by (simp add: infty_assn_pure_conv)
lemma [safe_constraint_rules]: "IS_BELOW_ID R ⟹ IS_BELOW_ID (⟨R⟩infty_rel)"
by (auto simp: infty_rel_def IS_BELOW_ID_def)
sepref_register Num Infty
lemma Num_hnr[sepref_fr_rules]: "(return o Num,RETURN o Num)∈A⇧d →⇩a infty_assn A"
by sepref_to_hoare sep_auto
lemma Infty_hnr[sepref_fr_rules]: "(uncurry0 (return Infty),uncurry0 (RETURN Infty))∈unit_assn⇧k →⇩a infty_assn A"
by sepref_to_hoare sep_auto
sepref_register case_infty
lemma [sepref_monadify_arity]: "case_infty ≡ λ⇩2f1 f2 x. SP case_infty$f1$(λ⇩2x. f2$x)$x"
by simp
lemma [sepref_monadify_comb]: "case_infty$f1$f2$x ≡ (⤜)$(EVAL$x)$(λ⇩2x. SP case_infty$f1$f2$x)" by simp
lemma [sepref_monadify_comb]: "EVAL$(case_infty$f1$(λ⇩2x. f2 x)$x)
≡ (⤜)$(EVAL$x)$(λ⇩2x. SP case_infty$(EVAL $ f1)$(λ⇩2x. EVAL $ f2 x)$x)"
apply (rule eq_reflection)
by (simp split: infty.splits)
lemma infty_assn_ctxt: "infty_assn A x y = z ⟹ hn_ctxt (infty_assn A) x y = z"
by (simp add: hn_ctxt_def)
lemma infty_cases_hnr[sepref_prep_comb_rule, sepref_comb_rules]:
fixes A e e'
defines [simp]: "INVe ≡ hn_invalid (infty_assn A) e e'"
assumes FR: "Γ ⟹⇩t hn_ctxt (infty_assn A) e e' * F"
assumes Infty: "⟦e = Infty; e' = Infty⟧ ⟹ hn_refine (hn_ctxt (infty_assn A) e e' * F) f1' (hn_ctxt XX1 e e' * Γ1') R f1"
assumes Num: "⋀x1 x1a. ⟦e = Num x1; e' = Num x1a⟧ ⟹ hn_refine (hn_ctxt A x1 x1a * INVe * F) (f2' x1a) (hn_ctxt A' x1 x1a * hn_ctxt XX2 e e' * Γ2') R (f2 x1)"
assumes MERGE2[unfolded hn_ctxt_def]: "Γ1' ∨⇩A Γ2' ⟹⇩t Γ'"
shows "hn_refine Γ (case_infty f1' f2' e') (hn_ctxt (infty_assn A') e e' * Γ') R (case_infty$f1$(λ⇩2x. f2 x)$e)"
apply (rule hn_refine_cons_pre[OF FR])
apply1 extract_hnr_invalids
apply (cases e; cases e'; simp add: infty_assn.simps[THEN infty_assn_ctxt])
subgoal
apply (rule hn_refine_cons[OF _ Infty _ entt_refl]; assumption?)
applyS (simp add: hn_ctxt_def)
apply (subst mult.commute, rule entt_fr_drop)
apply (rule entt_trans[OF _ MERGE2])
apply (simp add:)
done
subgoal
apply (rule hn_refine_cons[OF _ Num _ entt_refl]; assumption?)
applyS (simp add: hn_ctxt_def)
apply (rule entt_star_mono)
apply1 (rule entt_fr_drop)
applyS (simp add: hn_ctxt_def)
apply1 (rule entt_trans[OF _ MERGE2])
applyS (simp add:)
done
done
lemma hnr_val[sepref_fr_rules]: "(return o Weight.val,RETURN o Weight.val) ∈ [λx. x≠Infty]⇩a (infty_assn A)⇧d → A"
apply sepref_to_hoare
subgoal for x y by (cases x; cases y; sep_auto)
done
context
fixes A :: "'a::weight ⇒ 'b ⇒ assn"
fixes plusi
assumes GA[unfolded GEN_ALGO_def, sepref_fr_rules]: "GEN_ALGO plusi (λf. (uncurry f,uncurry (RETURN oo (+)))∈A⇧k*⇩aA⇧k →⇩a A)"
begin
sepref_thm infty_plus_impl is "uncurry (RETURN oo (+))" :: "((infty_assn A)⇧k *⇩a (infty_assn A)⇧k →⇩a infty_assn A)"
unfolding infty_plus_eq_plus[symmetric] infty_plus_def[abs_def]
by sepref
end
concrete_definition infty_plus_impl uses infty_plus_impl.refine_raw is "(uncurry ?impl,_)∈_"
lemmas [sepref_fr_rules] = infty_plus_impl.refine
definition infty_less where
"infty_less lt a b ≡ case (a,b) of (Num a, Num b) ⇒ lt a b | (Num _, Infty) ⇒ True | _ ⇒ False"
lemma infty_less_param[param]:
"(infty_less,infty_less) ∈ (R→R→bool_rel) → ⟨R⟩infty_rel → ⟨R⟩infty_rel → bool_rel"
unfolding infty_less_def[abs_def]
by parametricity
lemma infty_less_eq_less: "infty_less (<) = (<)"
unfolding infty_less_def[abs_def]
apply (clarsimp intro!: ext)
subgoal for a b by (cases a; cases b; auto)
done
context
fixes A :: "'a::weight ⇒ 'b ⇒ assn"
fixes lessi
assumes GA[unfolded GEN_ALGO_def, sepref_fr_rules]: "GEN_ALGO lessi (λf. (uncurry f,uncurry (RETURN oo (<)))∈A⇧k*⇩aA⇧k →⇩a bool_assn)"
begin
sepref_thm infty_less_impl is "uncurry (RETURN oo (<))" :: "((infty_assn A)⇧k *⇩a (infty_assn A)⇧k →⇩a bool_assn)"
unfolding infty_less_eq_less[symmetric] infty_less_def[abs_def]
by sepref
end
concrete_definition infty_less_impl uses infty_less_impl.refine_raw is "(uncurry ?impl,_)∈_"
lemmas [sepref_fr_rules] = infty_less_impl.refine
lemma param_mpath': "(mpath',mpath')
∈ ⟨⟨A×⇩r B ×⇩r A⟩list_rel ×⇩r B⟩option_rel → ⟨⟨A×⇩r B ×⇩r A⟩list_rel⟩option_rel"
proof -
have 1: "mpath' = map_option fst"
apply (intro ext, rename_tac x)
apply (case_tac x)
apply simp
apply (rename_tac a)
apply (case_tac a)
apply simp
done
show ?thesis
unfolding 1
by parametricity
qed
lemmas (in -) [sepref_import_param] = param_mpath'
lemma param_mpath_weight':
"(mpath_weight', mpath_weight') ∈ ⟨⟨A×⇩rB×⇩rA⟩list_rel ×⇩r B⟩option_rel → ⟨B⟩infty_rel"
by (auto elim!: option_relE simp: infty_rel_def top_infty_def)
lemmas [sepref_import_param] = param_mpath_weight'
context Dijkstra begin
lemmas impl_aux = mdijkstra_def[unfolded mdinit_def mpop_min_def mupdate_def]
lemma mdijkstra_correct:
"(mdijkstra, SPEC (is_shortest_path_map v0)) ∈ ⟨br αr res_invarm⟩nres_rel"
proof -
note mdijkstra_refines
also note dijkstra'_refines
also note dijkstra_correct
finally show ?thesis
by (rule nres_relI)
qed
end
locale Dijkstra_Impl = fixes w_dummy :: "'W::{weight,heap}"
begin
text ‹Weights›
sepref_register "0::'W"
lemmas [sepref_import_param] =
IdI[of "0::'W"]
abbreviation "weight_assn ≡ id_assn :: 'W ⇒ _"
lemma w_plus_param: "((+), (+)::'W⇒_) ∈ Id → Id → Id" by simp
lemma w_less_param: "((<), (<)::'W⇒_) ∈ Id → Id → Id" by simp
lemmas [sepref_import_param] = w_plus_param w_less_param
lemma [sepref_gen_algo_rules]:
"GEN_ALGO (return oo (+)) (λf. (uncurry f, uncurry (RETURN ∘∘ (+))) ∈ id_assn⇧k *⇩a id_assn⇧k →⇩a id_assn)"
"GEN_ALGO (return oo (<)) (λf. (uncurry f, uncurry (RETURN ∘∘ (<))) ∈ id_assn⇧k *⇩a id_assn⇧k →⇩a id_assn)"
by (sep_auto simp: GEN_ALGO_def pure_def intro!: hfrefI hn_refineI)+
lemma conv_prio_pop_min: "prio_pop_min m = do {
ASSERT (dom m ≠ {});
((k,v),m) ← mop_pm_pop_min id m;
RETURN (k,v,m)
}"
unfolding prio_pop_min_def mop_pm_pop_min_def
by (auto simp: pw_eq_iff refine_pw_simps ran_def)
end
context fixes N :: nat and w_dummy::"'W::{heap,weight}" begin
interpretation Dijkstra_Impl w_dummy .
definition "drmap_assn2 ≡ IICF_Sepl_Binding.iam.assn
(pure (node_rel N))
(prod_assn
(list_assn (prod_assn (pure (node_rel N)) (prod_assn weight_assn (pure (node_rel N)))))
weight_assn)
"
concrete_definition mdijkstra' uses Dijkstra.impl_aux
sepref_definition dijkstra_imp is "uncurry mdijkstra'"
:: "(is_graph N (Id::('W×'W) set))⇧k *⇩a (pure (node_rel N))⇧k →⇩a drmap_assn2"
unfolding mdijkstra'_def
apply (subst conv_prio_pop_min)
apply (rewrite in "RETURN (_,⌑)" iam.fold_custom_empty)
apply (rewrite hm_fold_custom_empty_sz[of N])
apply (rewrite in "_(_ ↦ (⌑,0))" HOL_list.fold_custom_empty)
unfolding drmap_assn2_def
using [[id_debug, goals_limit = 1]]
by sepref
export_code dijkstra_imp checking SML_imp
end
text ‹The main correctness theorem›
thm Dijkstra.mdijkstra_correct
lemma mdijkstra'_aref: "(uncurry mdijkstra',uncurry (SPEC oo weighted_graph.is_shortest_path_map))
∈ [λ(G,v0). Dijkstra G v0]⇩f Id×⇩rId → ⟨br Dijkstra.αr Dijkstra.res_invarm⟩nres_rel"
using Dijkstra.mdijkstra_correct
by (fastforce intro!: frefI simp: mdijkstra'.refine[symmetric])
definition "drmap_assn N ≡ hr_comp (drmap_assn2 N) (br Dijkstra.αr Dijkstra.res_invarm)"
context notes [fcomp_norm_unfold] = drmap_assn_def[symmetric] begin
theorem dijkstra_imp_correct: "(uncurry (dijkstra_imp N), uncurry (SPEC ∘∘ weighted_graph.is_shortest_path_map))
∈ [λ(G, v0). v0 ∈ nodes G ∧ (∀(v, w, v') ∈ edges G. 0 ≤ w)]⇩a (is_graph N Id)⇧k *⇩a (node_assn N)⇧k → drmap_assn N"
apply (rule hfref_weaken_pre'[OF _ dijkstra_imp.refine[FCOMP mdijkstra'_aref]])
proof clarsimp
fix G :: "(nat,'w::{weight,heap}) graph" and v0
assume v0_is_node: "v0 ∈ nodes G"
and nonneg_weights: "∀(v, w, v') ∈ edges G. 0 ≤ w"
and "v0<N"
and RDOM: "rdomp (is_graph N Id) G"
from RDOM interpret valid_graph G unfolding is_graph_def rdomp_def by auto
from RDOM have [simp]: "finite V" unfolding is_graph_def rdomp_def by auto
from RDOM have "∀v∈V. {(w, v'). (v, w, v') ∈ E} ∈
Range (⟨Id ×⇩r node_rel N⟩list_set_rel)"
by (auto simp: succ_def is_graph_def rdomp_def)
hence "∀v∈V. finite {(w, v'). (v, w, v') ∈ E}"
unfolding list_set_rel_range by simp
hence "finite (Sigma V (λv. {(w, v'). (v, w, v') ∈ E}))"
by auto
also have "E ⊆ (Sigma V (λv. {(w, v'). (v, w, v') ∈ E}))"
using E_valid
by auto
finally (finite_subset[rotated]) have [simp]: "finite E" .
show "Dijkstra G v0"
apply (unfold_locales)
unfolding is_graph_def using v0_is_node nonneg_weights
by auto
qed
end
corollary dijkstra_imp_rule: "
<is_graph n Id G Gi * ↑(v0 ∈ nodes G ∧ (∀(v, w, v') ∈ edges G. 0 ≤ w))>
dijkstra_imp n Gi v0
<λmi. (is_graph n Id) G Gi
* (∃⇩Am. drmap_assn n m mi * ↑(weighted_graph.is_shortest_path_map G v0 m)) >⇩t"
using dijkstra_imp_correct[to_hnr, of v0 G n v0 Gi]
unfolding hn_refine_def
apply (clarsimp)
apply (erule cons_rule[rotated -1])
apply (sep_auto simp: hn_ctxt_def pure_def is_graph_def)
apply (sep_auto simp: hn_ctxt_def)
done
end
Theory Sepref_NDFS
section ‹Imperative Implementation of of Nested DFS (HPY-Improvement)›
theory Sepref_NDFS
imports
"../Sepref"
Collections_Examples.Nested_DFS
Sepref_Graph
"HOL-Library.Code_Target_Numeral"
begin
sepref_decl_intf 'v i_red_witness is "'v list * 'v"
lemma id_red_witness[id_rules]:
"red_init_witness ::⇩i TYPE('v ⇒ 'v ⇒ 'v i_red_witness option)"
"prep_wit_red ::⇩i TYPE('v ⇒ 'v i_red_witness option ⇒ 'v i_red_witness option)"
by simp_all
definition
red_witness_rel_def_internal: "red_witness_rel R ≡ ⟨⟨R⟩list_rel,R⟩prod_rel"
lemma red_witness_rel_def: "⟨R⟩red_witness_rel ≡ ⟨⟨R⟩list_rel,R⟩prod_rel"
unfolding red_witness_rel_def_internal[abs_def] by (simp add: relAPP_def)
lemma red_witness_rel_sv[constraint_rules]:
"single_valued R ⟹ single_valued (⟨R⟩red_witness_rel)"
unfolding red_witness_rel_def
by tagged_solver
lemma [sepref_fr_rules]: "hn_refine
(hn_val R u u' * hn_val R v v')
(return (red_init_witness u' v'))
(hn_val R u u' * hn_val R v v')
(option_assn (pure (⟨R⟩red_witness_rel)))
(RETURN$(red_init_witness$u$v))"
apply simp
unfolding red_init_witness_def
apply rule
apply (sep_auto simp: hn_ctxt_def pure_def red_witness_rel_def)
done
lemma [sepref_fr_rules]: "hn_refine
(hn_val R u u' * hn_ctxt (option_assn (pure (⟨R⟩red_witness_rel))) w w')
(return (prep_wit_red u' w'))
(hn_val R u u' * hn_ctxt (option_assn (pure (⟨R⟩red_witness_rel))) w w')
(option_assn (pure (⟨R⟩red_witness_rel)))
(RETURN$(prep_wit_red$u$w))"
apply rule
apply (cases w)
apply (sep_auto simp: hn_ctxt_def pure_def red_witness_rel_def)
apply (cases w')
apply (sep_auto simp: hn_ctxt_def pure_def red_witness_rel_def)
apply (sep_auto simp: hn_ctxt_def pure_def red_witness_rel_def)
done
term red_dfs
sepref_definition red_dfs_impl is
"(uncurry2 (uncurry red_dfs))"
:: "(adjg_assn nat_assn)⇧k *⇩a (ias.assn nat_assn)⇧k *⇩a (ias.assn nat_assn)⇧d *⇩a nat_assn⇧k →⇩a UNSPEC"
unfolding red_dfs_def[abs_def]
using [[goals_limit = 1]]
by sepref
export_code red_dfs_impl checking SML_imp
declare red_dfs_impl.refine[sepref_fr_rules]
sepref_register red_dfs :: "'a i_graph ⇒ 'a set ⇒ 'a set ⇒ 'a
⇒ ('a set * 'a i_red_witness option) nres"
lemma id_init_wit_blue[id_rules]:
"init_wit_blue ::⇩i TYPE('a ⇒ 'a i_red_witness option ⇒ 'a blue_witness)"
by simp
lemma hn_blue_wit[sepref_import_param]:
"(NO_CYC,NO_CYC)∈blue_wit_rel"
"(prep_wit_blue,prep_wit_blue)∈nat_rel→blue_wit_rel→blue_wit_rel"
"((=),(=))∈blue_wit_rel→blue_wit_rel→bool_rel"
by simp_all
lemma hn_init_wit_blue[sepref_fr_rules]: "hn_refine
(hn_val nat_rel v v' * hn_ctxt (option_assn (pure (⟨nat_rel⟩red_witness_rel))) w w')
(return (init_wit_blue v' w'))
(hn_val nat_rel v v' * hn_ctxt (option_assn (pure (⟨nat_rel⟩red_witness_rel))) w w')
(pure blue_wit_rel)
(RETURN$(init_wit_blue$v$w))"
apply rule
apply (sep_auto simp: hn_ctxt_def pure_def)
apply (case_tac w, sep_auto)
apply (case_tac w', sep_auto, sep_auto simp: red_witness_rel_def)
done
lemma hn_extract_res[sepref_import_param]:
"(extract_res, extract_res) ∈ blue_wit_rel → Id"
by simp
thm red_dfs_impl.refine
sepref_definition blue_dfs_impl is "uncurry2 blue_dfs" :: "((adjg_assn nat_assn)⇧k*⇩a(ias.assn nat_assn)⇧k*⇩anat_assn⇧k→⇩aid_assn)"
unfolding blue_dfs_def[abs_def]
apply (rewrite in "RECT _ ⌑" ias.fold_custom_empty)+
using [[goals_limit = 1]]
by sepref
export_code blue_dfs_impl checking SML_imp
definition "blue_dfs_spec E A v0 ≡ SPEC (λr. case r of None ⇒ ¬ has_acc_cycle E A v0
| Some (v, pc, pv) ⇒ is_acc_cycle E A v0 v pv pc)"
lemma blue_dfs_correct': "(uncurry2 blue_dfs, uncurry2 blue_dfs_spec) ∈ [λ((E,A),v0). finite (E⇧*``{v0})]⇩f ((Id×⇩rId)×⇩rId) → ⟨Id⟩nres_rel"
apply (intro frefI nres_relI)
unfolding blue_dfs_spec_def apply clarsimp
apply (refine_vcg blue_dfs_correct)
done
lemmas blue_dfs_impl_correct' = blue_dfs_impl.refine[FCOMP blue_dfs_correct']
theorem blue_dfs_impl_correct:
fixes E
assumes "finite (E⇧*``{v0})"
shows "<ias.assn id_assn A A_impl * adjg_assn id_assn E succ_impl>
blue_dfs_impl succ_impl A_impl v0
<λr. ias.assn id_assn A A_impl * adjg_assn id_assn E succ_impl
* ↑(
case r of None ⇒ ¬has_acc_cycle E A v0
| Some (v,pc,pv) ⇒ is_acc_cycle E A v0 v pv pc
)>⇩t"
using blue_dfs_impl_correct'[THEN hfrefD, THEN hn_refineD, of "((E,A),v0)" "((succ_impl,A_impl),v0)", simplified]
apply (rule cons_rule[rotated -1])
using assms
by (sep_auto simp: blue_dfs_spec_def pure_def)+
text ‹ We tweak the initialization vector of the outer DFS,
to allow pre-initialization of the size of the array-lists.
When set to the number of nodes, array-lists will never be resized
during the run, which saves some time. ›
context
fixes N :: nat
begin
lemma testsuite_blue_dfs_modify:
"({}::nat set, {}::nat set, {}::nat set, s)
= (op_ias_empty_sz N, op_ias_empty_sz N, op_ias_empty_sz N, s)"
by simp
sepref_definition blue_dfs_impl_sz is "uncurry2 blue_dfs" :: "((adjg_assn nat_assn)⇧k*⇩a(ias.assn nat_assn)⇧k*⇩anat_assn⇧k→⇩aid_assn)"
unfolding blue_dfs_def[abs_def]
apply (rewrite in "RECT _ ⌑" testsuite_blue_dfs_modify)
using [[goals_limit = 1]]
by sepref
export_code blue_dfs_impl_sz checking SML_imp
end
lemmas blue_dfs_impl_sz_correct' = blue_dfs_impl_sz.refine[FCOMP blue_dfs_correct']
term blue_dfs_impl_sz
theorem blue_dfs_impl_sz_correct:
fixes E
assumes "finite (E⇧*``{v0})"
shows "<ias.assn id_assn A A_impl * adjg_assn id_assn E succ_impl>
blue_dfs_impl_sz N succ_impl A_impl v0
<λr. ias.assn id_assn A A_impl * adjg_assn id_assn E succ_impl
* ↑(
case r of None ⇒ ¬has_acc_cycle E A v0
| Some (v,pc,pv) ⇒ is_acc_cycle E A v0 v pv pc
)>⇩t"
using blue_dfs_impl_sz_correct'[THEN hfrefD, THEN hn_refineD, of "((E,A),v0)" "((succ_impl,A_impl),v0)", simplified]
apply (rule cons_rule[rotated -1])
using assms
by (sep_auto simp: blue_dfs_spec_def pure_def)+
end
Theory Sepref_Minitests
section ‹Miscellaneous Tests›
theory Sepref_Minitests
imports
"../IICF/IICF"
Sepref_Graph
"HOL-Library.Code_Target_Numeral"
begin
definition [simp]: "mop_plus = RETURN oo (+)"
definition [simp]: "mop_plusi = return oo (+)"
lemma [sepref_fr_rules]: "(uncurry mop_plusi,uncurry mop_plus) ∈ nat_assn⇧k*⇩anat_assn⇧k →⇩a nat_assn"
by (sep_auto intro!: hfrefI hn_refineI simp: pure_def)
sepref_register mop_plus
sepref_definition copy_test is "(λx. do {
let y = x+ x;
mop_plus y y
})" :: "((nat_assn)⇧k →⇩a UNSPEC)"
by sepref
definition "bar s ≡ do {
x←RETURN (insert (1::nat) s);
y←RETURN (insert (1::nat) x);
ASSERT (y≠{});
if 1∈y then
RETURN (y)
else RETURN (insert (1::nat) y)
}"
definition "bar2 s ≡ do {
if (1::nat)∈s then
RETURN True
else RETURN False
}"
definition "bar' ≡ do {
y ← RETURN {1,1::nat};
if 1∈y then
RETURN (y)
else RETURN (insert 1 y)
}"
definition "foo ≡ do {
s ← RETURN [1,1,1::nat];
y ← RETURN ({}::nat set);
RECT (λD l.
case l of
[] ⇒ RETURN (case [0,1] of [] ⇒ {} | x#xs ⇒ {x})
| x#l ⇒ do {
r ← D l;
RETURN (if x<1 then insert x r else insert (x+1) r)
}) s
}
"
definition "simple_rec ≡ do {
RECT (λD l. case l of
[] ⇒ RETURN 0
| x#xs ⇒ do {
a←D xs;
RETURN (a+x)
}
) [1,0::nat]
}"
definition "simple_while ≡ do {
WHILEIT (λ(i,m). i ∉ dom m) (λ(i,m). i≥1) (λ(i,m). do {
let i=i+1;
RETURN (i,m)
}) (10::nat, Map.empty::nat ⇀ nat)
}"
definition "lst_mem_to_sets ≡ do {
l←RETURN [0,1,0::nat];
RECT (λD l.
case l of
[] ⇒ RETURN []
| x#l ⇒ do {
r ← D l;
RETURN ({x}#r)
}) l
}
"
definition "lst_mem_to_sets_nonlin ≡ do {
l←RETURN [0,1,0::nat];
RECT (λD l.
case l of
[] ⇒ RETURN []
| x#l ⇒ do {
r ← D l;
RETURN ({x,x}#r)
}) l
}
"
definition "lst_mem_to_sets_nonlin2 ≡ do {
l←RETURN [0,1,0::nat];
RECT (λD l.
case l of
[] ⇒ RETURN []
| x#l ⇒ do {
r ← D l;
RETURN ({x}#r@r)
}) l
}
"
definition "lst_nonlin ≡ do {
l←RETURN [0::nat];
RETURN (case l of [] ⇒ l | x#xs ⇒ x#l)
}"
definition "lst_nonlin2 ≡ do {
l←RETURN [0::nat];
RETURN (case l of [] ⇒ [] | x#xs ⇒ x#(x#xs))
}"
definition "lst_nonlin3 ≡ do {
l←RETURN [{0::nat}];
RETURN (case l of [] ⇒ [] | x#xs ⇒ x#(x#xs))
}"
definition "lst_nonlin4 ≡ do {
l←RETURN [{0::nat}];
RETURN (l@l)
}"
definition "dup_arg == do {
x <- RETURN [1::nat];
RETURN (x@x)
}"
definition "big_list == RETURN [1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1::nat]"
definition "big_list2 == do {
x1 <- RETURN ({}::nat set);
x2 <- RETURN {};
x3 <- RETURN {};
x4 <- RETURN {};
x5 <- RETURN {};
x6 <- RETURN {};
x7 <- RETURN {};
x8 <- RETURN {};
RETURN [x1,x2,x3,x4,x5,x6,x7,x8]
}"
term Set.insert
definition "foo1 ≡
case [] of
[] ⇒ RETURN {}
| x#l ⇒ do {
r ← RETURN ({}::nat set);
RETURN (if x<1 then insert x r else insert x r)
}
"
definition "basic_foreach ≡ do {
FOREACH⇩C {0,1::nat} (λs. s>1) (λx s. RETURN (x+s)) 0
}"
definition "basic_foreach2 ≡ do {
FOREACH⇩C {0,1::nat} (λ_. True) (λx s. RETURN (insert x s)) {}
}"
definition "basic_option ≡ do {
let a={};
let b=Some a;
let c=Some (0::nat);
let d=Some (1::nat);
RETURN (b,c=d)
}"
definition dfs :: "(('a×'a) set) ⇒ 'a ⇒ 'a ⇒ ('a set × bool) nres"
where
"⋀E vd v0. dfs E vd v0 ≡ REC⇩T (λD (V,v).
if v=vd then RETURN (V,True)
else if v∈V then RETURN (V,False)
else do {
let V=insert v V;
FOREACH⇩C (E``{v}) (λ(_,b). b=False) (λv' (V,_). D (V,v')) (V,False) }
) ({},v0)"
lemma ID_unfold_vars: "ID x y T ⟹ x≡y" by simp
schematic_goal testsuite_basic_param:
fixes s
notes [id_rules] =
itypeI[Pure.of s "TYPE(nat set)"]
shows
"hn_refine (emp * hn_ctxt (hs.assn id_assn) s s') (?c1::?'c1 Heap) ?Γ1' ?R1 (bar s)"
"hn_refine (emp * hn_ctxt (hs.assn id_assn) s s') (?c2::?'c2 Heap) ?Γ2' ?R2 (bar2 s)"
unfolding bar_def bar2_def
using [[id_debug]]
by sepref+
term case_list
thm id_rules
lemmas [id_rules] =
itypeI[Pure.of RECT "TYPE ((('a ⇒ 'b) ⇒ 'a ⇒ 'b) ⇒ 'a ⇒ 'b)"]
itypeI[Pure.of case_list "TYPE('a ⇒ ('b ⇒ 'b list ⇒ 'a) ⇒ 'b list ⇒ 'a)"]
ML ‹
fun is_eta_norm t = t aconv (Envir.eta_contract t)
fun find_not_eta_norm (a$b) = (find_not_eta_norm a @ find_not_eta_norm b)
| find_not_eta_norm (t as Abs (_,_,t'$Bound 0)) = t :: find_not_eta_norm t'
| find_not_eta_norm (Abs (_,_,t)) = find_not_eta_norm t
| find_not_eta_norm _ = []
fun is_eta_norm_tac st = if is_eta_norm (Thm.prop_of st) then Seq.single st
else (raise TERM ("¬eta-norm",find_not_eta_norm (Thm.prop_of st)))
›
definition "xfoo ≡ do {
s ← RETURN [1::nat];
y ← RETURN ({}::nat set);
RECT (λD l.
case l of
[] ⇒ RETURN ({0})
| x#l ⇒ do {
r ← D l;
RETURN (insert x r)
}) s
}
"
schematic_goal testsuite_basic1:
notes [sepref_fr_rules] = HOL_list_empty_hnr hs.hnr_op_empty[of nat_assn]
shows "hn_refine emp (?c1::?'c1 Heap) ?Γ1' ?R1 bar'"
and "hn_refine emp (?c2::?'c2 Heap) ?Γ2' ?R2 foo"
and "hn_refine emp (?c3::?'c3 Heap) ?Γ3' ?R3 simple_rec"
and "hn_refine emp (?c4::?'c4 Heap) ?Γ4' ?R4 lst_mem_to_sets"
and "hn_refine emp (?c5::?'c5 Heap) ?Γ5' ?R5 lst_mem_to_sets_nonlin"
and "hn_refine emp (?c7::?'c7 Heap) ?Γ7' ?R7 lst_nonlin"
and "hn_refine emp (?c8::?'c8 Heap) ?Γ8' ?R8 lst_nonlin2"
unfolding bar'_def foo_def simple_rec_def lst_mem_to_sets_def
lst_mem_to_sets_nonlin_def lst_mem_to_sets_nonlin2_def
lst_nonlin_def lst_nonlin2_def lst_nonlin3_def lst_nonlin4_def
using [[goals_limit = 1]]
apply sepref+
done
schematic_goal testsuite_basic2:
notes [sepref_fr_rules] = HOL_list_empty_hnr hs.hnr_op_empty hm.empty_hnr
shows "hn_refine emp (?c1::?'c1 Heap) ?Γ1' ?R1 dup_arg"
and "hn_refine emp (?c2::?'c2 Heap) ?Γ2' ?R2 big_list"
and "hn_refine emp (?c3::?'c3 Heap) ?Γ3' ?R3 big_list2"
and "hn_refine emp (?c4::?'c4 Heap) ?Γ4' ?R4 foo1"
and "hn_refine emp (?c5::?'c5 Heap) ?Γ5' ?R5 basic_foreach"
and "hn_refine emp (?c6::?'c6 Heap) ?Γ6' ?R6 basic_foreach2"
and "hn_refine emp (?c7::?'c7 Heap) ?Γ7' ?R7 basic_option"
and "hn_refine emp (?c8::?'c8 Heap) ?Γ8' ?R8 simple_while"
unfolding dup_arg_def big_list_def big_list2_def foo1_def
basic_foreach_def basic_foreach2_def simple_while_def
basic_option_def
using [[goals_limit = 1, id_debug]]
apply sepref+
done
sepref_definition imp_dfs is "uncurry2 dfs" :: "((adjg_assn nat_assn)⇧k *⇩a nat_assn⇧k *⇩a nat_assn⇧k →⇩a prod_assn (hs.assn nat_assn) bool_assn)"
unfolding dfs_def[abs_def]
apply (rewrite in "FOREACHc ⌑" op_graph_succ_def[symmetric])
apply (rewrite in "(⌑,_)" hs.fold_custom_empty)
using [[goals_limit = 1]]
by sepref
export_code imp_dfs checking SML_imp
definition "simple_algo a c m x = do {
let s = {m};
RECT (λD (x,s,l).
if x∈s then RETURN l
else D ((a*x+c) mod m,insert x s,l+1)
) (x::nat,s,0::nat)
}"
schematic_goal sa_impl:
notes [autoref_tyrel] = ty_REL[where 'a = "nat set"
and R="⟨nat_rel⟩iam_set_rel"]
assumes [autoref_rules]: "(a,a)∈nat_rel"
assumes [autoref_rules]: "(c,c)∈nat_rel"
assumes [autoref_rules]: "(m,m)∈nat_rel"
assumes [autoref_rules]: "(x,x)∈nat_rel"
shows "(?c::?'c,simple_algo a c m x)∈?R"
unfolding simple_algo_def[abs_def]
using [[autoref_trace_failed_id]]
apply autoref_monadic
done
concrete_definition sa_impl uses sa_impl
prepare_code_thms sa_impl_def
export_code sa_impl checking SML
sepref_definition sai_impl is
"(uncurry2 (uncurry simple_algo))"
:: "(nat_assn⇧k*⇩anat_assn⇧k*⇩anat_assn⇧k*⇩anat_assn⇧k →⇩a nat_assn)"
unfolding simple_algo_def[abs_def]
unfolding ias.fold_custom_empty
using [[goals_limit = 1]]
using [[id_debug]]
by sepref
export_code sai_impl checking SML
term Array.upd
definition "sad_impl a c m x ≡ do {
s←Array.new m False;
heap.fixp_fun (λD (x,s,l). do {
brk←Array.nth s x;
if brk then return l
else do {
_←Array.len s;
_←if x<l then return True else return False;
s←Array.upd x True s;
D ((a*x+c) mod m,s,l+1)
}
}) (x,s,0::nat)
}"
definition "sad_impl2 a c m x ≡ do {
s←Array.new m False;
heap.fixp_fun (λD (x,l). do {
brk←Array.nth s x;
if brk then return l
else do {
Array.upd x True s;
D ((a*x+c) mod m,l+1)
}
}) (x,0::nat)
}"
prepare_code_thms sad_impl_def
prepare_code_thms sad_impl2_def
code_thms sai_impl
lemma
"ias_ins k a = do {
l←Array.len a;
if k<l then
Array.upd k True a
else do {
let newsz = max (k+1) (2 * l + 3);
a←Array_Blit.array_grow a newsz False;
Array.upd k True a
}
}"
unfolding ias_ins_def
apply (fo_rule cong[OF arg_cong])
apply (auto)
done
export_code sa_impl sad_impl sad_impl2 sai_impl
checking SML_imp
schematic_goal
shows "hn_refine emp (?c1::?'c1 Heap) ?Γ1' ?R1
(do {
let x=(1::nat);
RETURN {x,x}
})"
apply (rewrite in "RETURN ⌑" hs.fold_custom_empty)
apply sepref
done
term hn_invalid
definition "remdup l ≡
RECT (λremdup. λ(
[],s) ⇒ RETURN op_HOL_list_empty
| (x#xs,s) ⇒ if x∈s then
remdup (xs,s )
else do {
l ← remdup (xs, insert x s);
RETURN (x#l)
}
) (l,op_hs_empty)
"
schematic_goal
fixes l :: "nat list"
notes [id_rules] = itypeI[Pure.of l "TYPE(nat list)"]
shows "hn_refine ( (hn_ctxt (list_assn (pure Id))) l li) (?c::?'c Heap) ?Γ ?R (remdup l)"
unfolding remdup_def
using [[id_debug]]
by sepref
text ‹Test structural frame-inference and merging (on product type)›
definition "smart_match_test1 ≡ λ(p1,p2). RETURN (p1+p2)"
sepref_definition smart_match_test1_impl is "smart_match_test1" :: "((prod_assn nat_assn nat_assn)⇧k →⇩a nat_assn)"
unfolding smart_match_test1_def
by sepref
sepref_register smart_match_test1
lemmas [sepref_fr_rules] = smart_match_test1_impl.refine
definition "smart_match_test2 ≡ do {
let p = (2::nat,2::nat);
f ← if True then
case p of (a,b) ⇒ RETURN (Some b)
else
case p of (a,b) ⇒ RETURN (Some a);
smart_match_test1 p
}"
sepref_thm smart_match_test2_impl is "uncurry0 smart_match_test2" :: "unit_assn⇧k →⇩a nat_assn"
unfolding smart_match_test2_def
by sepref
sepref_thm regr_incomplete_monadify is "RETURN o (λl. fold (λx. (#) (case x of (x, xa) ⇒ x + xa)) l [])" :: "(list_assn (prod_assn nat_assn nat_assn))⇧k →⇩a list_assn nat_assn"
unfolding test_def[abs_def] "HOL_list.fold_custom_empty"
by sepref
end
Theory Worklist_Subsumption
section ‹Generic Worklist Algorithm with Subsumption›
theory Worklist_Subsumption
imports "../Sepref"
begin
subsection ‹Utilities›
definition take_from_set where
"take_from_set s = ASSERT (s ≠ {}) ⪢ SPEC (λ (x, s'). x ∈ s ∧ s' = s - {x})"
lemma take_from_set_correct:
assumes "s ≠ {}"
shows "take_from_set s ≤ SPEC (λ (x, s'). x ∈ s ∧ s' = s - {x})"
using assms unfolding take_from_set_def by simp
lemmas [refine_vcg] = take_from_set_correct[THEN order.trans]
definition take_from_mset where
"take_from_mset s = ASSERT (s ≠ {#}) ⪢ SPEC (λ (x, s'). x ∈# s ∧ s' = s - {#x#})"
lemma take_from_mset_correct:
assumes "s ≠ {#}"
shows "take_from_mset s ≤ SPEC (λ (x, s'). x ∈# s ∧ s' = s - {#x#})"
using assms unfolding take_from_mset_def by simp
lemmas [refine_vcg] = take_from_mset_correct[THEN order.trans]
lemma set_mset_mp: "set_mset m ⊆ s ⟹ n < count m x ⟹ x∈s"
by (meson count_greater_zero_iff le_less_trans subsetCE zero_le)
lemma pred_not_lt_is_zero: "(¬ n - Suc 0 < n) ⟷ n=0" by auto
subsection ‹Search Spaces›
text ‹
A search space consists of a step relation, a start state,
a final state predicate, and a subsumption preorder.
›
locale Search_Space_Defs =
fixes E :: "'a ⇒ 'a ⇒ bool"
and a⇩0 :: 'a
and F :: "'a ⇒ bool"
and subsumes :: "'a ⇒ 'a ⇒ bool" (infix "≼" 50)
begin
definition reachable where
"reachable = E⇧*⇧* a⇩0"
definition "F_reachable ≡ ∃a. reachable a ∧ F a"
end
text ‹The set of reachable states must be finite,
subsumption must be a preorder, and be compatible with steps and final states.›
locale Search_Space = Search_Space_Defs +
assumes finite_reachable: "finite {a. reachable a}"
assumes refl[intro!, simp]: "a ≼ a"
and trans[trans]: "a ≼ b ⟹ b ≼ c ⟹ a ≼ c"
assumes mono: "a ≼ b ⟹ E a a' ⟹ reachable a ⟹ reachable b ⟹ ∃ b'. E b b' ∧ a' ≼ b'"
and F_mono: "a ≼ a' ⟹ F a ⟹ F a'"
begin
lemma start_reachable[intro!, simp]:
"reachable a⇩0"
unfolding reachable_def by simp
lemma step_reachable:
assumes "reachable a" "E a a'"
shows "reachable a'"
using assms unfolding reachable_def by simp
lemma finitely_branching:
assumes "reachable a"
shows "finite (Collect (E a))"
by (metis assms finite_reachable finite_subset mem_Collect_eq step_reachable subsetI)
end
subsection ‹Worklist Algorithm›
term card
context Search_Space_Defs begin
definition "worklist_var = inv_image (finite_psupset (Collect reachable) <*lex*> measure size) (λ (a, b,c). (a,b))"
definition "worklist_inv_frontier passed wait =
(∀ a ∈ passed. ∀ a'. E a a' ⟶ (∃ b' ∈ passed ∪ set_mset wait. a' ≼ b'))"
definition "start_subsumed passed wait = (∃ a ∈ passed ∪ set_mset wait. a⇩0 ≼ a)"
definition "worklist_inv ≡ λ (passed, wait, brk).
passed ⊆ Collect reachable ∧
(brk ⟶ (∃ f. reachable f ∧ F f)) ∧
(¬ brk ⟶
worklist_inv_frontier passed wait
∧ (∀ a ∈ passed ∪ set_mset wait. ¬ F a)
∧ start_subsumed passed wait
∧ set_mset wait ⊆ Collect reachable)
"
definition "add_succ_spec wait a ≡ SPEC (λ(wait',brk).
if ∃a'. E a a' ∧ F a' then
brk
else set_mset wait' = set_mset wait ∪ {a' . E a a'} ∧ ¬brk
)"
definition worklist_algo where
"worklist_algo = do
{
if F a⇩0 then RETURN True
else do {
let passed = {};
let wait = {#a⇩0#};
(passed, wait, brk) ← WHILEIT worklist_inv (λ (passed, wait, brk). ¬ brk ∧ wait ≠ {#})
(λ (passed, wait, brk). do
{
(a, wait) ← take_from_mset wait;
ASSERT (reachable a);
if (∃ a' ∈ passed. a ≼ a') then RETURN (passed, wait, brk) else
do
{
(wait,brk) ← add_succ_spec wait a;
let passed = insert a passed;
RETURN (passed, wait, brk)
}
}
)
(passed, wait, False);
RETURN brk
}
}
"
end
subsubsection ‹Correctness Proof›
context Search_Space begin
lemma wf_worklist_var:
"wf worklist_var"
unfolding worklist_var_def by (auto simp: finite_reachable)
context
begin
private lemma aux1:
assumes "∀x∈passed. ¬ a ≼ x"
and "passed ⊆ Collect reachable"
and "reachable a"
shows "
((insert a passed, wait', brk'),
passed, wait, brk)
∈ worklist_var"
proof -
from assms have "a ∉ passed" by auto
with assms(2,3) show ?thesis
by (auto simp: worklist_inv_def worklist_var_def finite_psupset_def)
qed
private lemma aux2:
assumes
"a' ∈ passed"
"a ≼ a'"
"a ∈# wait"
"worklist_inv_frontier passed wait"
shows "worklist_inv_frontier passed (wait - {#a#})"
using assms unfolding worklist_inv_frontier_def
using trans
apply clarsimp
by (metis (no_types, lifting) Un_iff count_eq_zero_iff count_single mset_contains_eq mset_un_cases)
private lemma aux5:
assumes
"a' ∈ passed"
"a ≼ a'"
"a ∈# wait"
"start_subsumed passed wait"
shows "start_subsumed passed (wait - {#a#})"
using assms unfolding start_subsumed_def apply clarsimp
by (metis Un_iff insert_DiffM2 local.trans mset_right_cancel_elem)
private lemma aux3:
assumes
"set_mset wait ⊆ Collect reachable"
"a ∈# wait"
"set_mset wait' = set_mset (wait - {#a#}) ∪ Collect (E a)"
"worklist_inv_frontier passed wait"
shows "worklist_inv_frontier (insert a passed) wait'"
proof -
from assms(1,2) have "reachable a"
by (simp add: subset_iff)
with finitely_branching have [simp, intro!]: "finite (Collect (E a))" .
from assms(2,3,4) show ?thesis unfolding worklist_inv_frontier_def
by (metis Un_iff insert_DiffM insert_iff local.refl mem_Collect_eq set_mset_add_mset_insert)
qed
private lemma aux6:
assumes
"a ∈# wait"
"start_subsumed passed wait"
"set_mset wait' = set_mset (wait - {#a#}) ∪ Collect (E a)"
shows "start_subsumed (insert a passed) wait'"
using assms unfolding start_subsumed_def
by (metis Un_iff insert_DiffM insert_iff set_mset_add_mset_insert)
lemma aux4:
assumes "worklist_inv_frontier passed {#}" "reachable x" "start_subsumed passed {#}"
"passed ⊆ Collect reachable"
shows "∃ x' ∈ passed. x ≼ x'"
proof -
from ‹reachable x› have "E⇧*⇧* a⇩0 x" by (simp add: reachable_def)
from assms(3) obtain b where "a⇩0 ≼ b" "b ∈ passed" unfolding start_subsumed_def by auto
have "∃x'. ∃ x''. E⇧*⇧* b x' ∧ x ≼ x' ∧ x' ≼ x'' ∧ x'' ∈ passed" if
"E⇧*⇧* a x" "a ≼ b" "b ≼ b'" "b' ∈ passed"
"reachable a" "reachable b" for a b b'
using that proof (induction arbitrary: b b' rule: converse_rtranclp_induct)
case base
then show ?case by auto
next
case (step a a1 b b')
from ‹E a a1› ‹a ≼ b› ‹reachable a› ‹reachable b› obtain b1 where
"E b b1" "a1 ≼ b1"
using mono by blast
then obtain b1' where "E b' b1'" "b1 ≼ b1'" using assms(4) mono step.prems by blast
with ‹b' ∈ passed› assms(1) obtain b1'' where "b1'' ∈ passed" "b1' ≼ b1''"
unfolding worklist_inv_frontier_def by auto
with ‹b1 ≼ _› have "b1 ≼ b1''" using trans by blast
with step.IH[OF ‹a1 ≼ b1› this ‹b1'' ∈ passed›] ‹reachable a› ‹E a a1› ‹reachable b› ‹E b b1›
obtain x' x'' where
"E⇧*⇧* b1 x'" "x ≼ x'" "x' ≼ x''" "x'' ∈ passed"
by (auto intro: step_reachable)
moreover from ‹E b b1› ‹E⇧*⇧* b1 x'› have "E⇧*⇧* b x'" by auto
ultimately show ?case by auto
qed
from this[OF ‹E⇧*⇧* a⇩0 x› ‹a⇩0 ≼ b› refl ‹b ∈ _›] assms(4) ‹b ∈ passed› show ?thesis
by (auto intro: trans)
qed
theorem worklist_algo_correct:
"worklist_algo ≤ SPEC (λ brk. brk ⟷ F_reachable)"
proof -
note [simp] = size_Diff_submset pred_not_lt_is_zero
note [dest] = set_mset_mp
show ?thesis
unfolding worklist_algo_def add_succ_spec_def F_reachable_def
apply (refine_vcg wf_worklist_var)
apply (auto; fail) []
apply (auto simp: worklist_inv_def worklist_inv_frontier_def start_subsumed_def; fail)
apply (simp; fail)
apply (auto simp: worklist_inv_def; fail)
apply (auto simp: worklist_inv_def aux2 aux5
dest: in_diffD
split: if_split_asm; fail) []
apply (auto simp: worklist_inv_def worklist_var_def intro: finite_subset[OF _ finite_reachable]; fail)
apply (clarsimp split: if_split_asm)
apply (clarsimp simp: worklist_inv_def; blast intro: step_reachable; fail)
apply (auto
simp: worklist_inv_def step_reachable aux3 aux6 finitely_branching
dest: in_diffD; fail)[]
apply (auto simp: worklist_inv_def aux1; fail)
using F_mono apply (fastforce simp: worklist_inv_def dest!: aux4)
done
qed
lemmas [refine_vcg] = worklist_algo_correct[THEN order_trans]
end
end
subsection ‹Towards an Implementation›
locale Worklist1_Defs = Search_Space_Defs +
fixes succs :: "'a ⇒ 'a list"
locale Worklist1 = Worklist1_Defs + Search_Space +
assumes succs_correct: "reachable a ⟹ set (succs a) = Collect (E a)"
begin
definition "add_succ1 wait a ≡ nfoldli (succs a) (λ(_,brk). ¬brk) (λa (wait,brk). if F a then RETURN (wait,True) else RETURN (wait + {#a#},False)) (wait, False)"
lemma add_succ1_ref[refine]: "⟦(wait,wait')∈Id; (a,a')∈b_rel Id reachable⟧ ⟹ add_succ1 wait a ≤ ⇓(Id ×⇩r bool_rel) (add_succ_spec wait' a')"
apply simp
unfolding add_succ_spec_def add_succ1_def
apply (refine_vcg nfoldli_rule[where I = "λl1 _ (wait',brk). if brk then ∃a'. E a a' ∧ F a' else set_mset wait' = set_mset wait ∪ set l1 ∧ set l1 ∩ Collect F = {}"])
apply (auto; fail)
using succs_correct[of a] apply (auto; fail)
using succs_correct[of a] apply (auto; fail)
apply (auto; fail)
using succs_correct[of a] apply (auto; fail)
done
definition worklist_algo1 where
"worklist_algo1 = do
{
if F a⇩0 then RETURN True
else do {
let passed = {};
let wait = {#a⇩0#};
(passed, wait, brk) ← WHILEIT worklist_inv (λ (passed, wait, brk). ¬ brk ∧ wait ≠ {#})
(λ (passed, wait, brk). do
{
(a, wait) ← take_from_mset wait;
if (∃ a' ∈ passed. a ≼ a') then RETURN (passed, wait, brk) else
do
{
(wait,brk) ← add_succ1 wait a;
let passed = insert a passed;
RETURN (passed, wait, brk)
}
}
)
(passed, wait, False);
RETURN brk
}
}
"
lemma worklist_algo1_ref[refine]: "worklist_algo1 ≤ ⇓Id worklist_algo"
unfolding worklist_algo1_def worklist_algo_def
apply (refine_rcg)
apply refine_dref_type
unfolding worklist_inv_def
apply auto
done
end
end
Theory Worklist_Subsumption_Impl
theory Worklist_Subsumption_Impl
imports "../IICF/IICF" Worklist_Subsumption
begin
locale Worklist2_Defs = Worklist1_Defs +
fixes A :: "'a ⇒ 'ai ⇒ assn"
fixes succsi :: "'ai ⇒ 'ai list Heap"
fixes a⇩0i :: "'ai Heap"
fixes Fi :: "'ai ⇒ bool Heap"
fixes Lei :: "'ai ⇒ 'ai ⇒ bool Heap"
locale Worklist2 = Worklist2_Defs + Worklist1 +
assumes [sepref_fr_rules]: "(uncurry0 a⇩0i, uncurry0 (RETURN (PR_CONST a⇩0))) ∈ unit_assn⇧k →⇩a A"
assumes [sepref_fr_rules]: "(Fi,RETURN o PR_CONST F) ∈ A⇧k →⇩a bool_assn"
assumes [sepref_fr_rules]: "(uncurry Lei,uncurry (RETURN oo PR_CONST (≼))) ∈ A⇧k *⇩a A⇧k →⇩a bool_assn"
assumes [sepref_fr_rules]: "(succsi,RETURN o PR_CONST succs) ∈ A⇧k →⇩a list_assn A"
begin
sepref_register "PR_CONST a⇩0" "PR_CONST F" "PR_CONST (≼)" "PR_CONST succs"
lemma [def_pat_rules]:
"a⇩0 ≡ UNPROTECT a⇩0" "F ≡ UNPROTECT F" "(≼) ≡ UNPROTECT (≼)" "succs ≡ UNPROTECT succs"
by simp_all
lemma take_from_mset_as_mop_mset_pick: "take_from_mset = mop_mset_pick"
apply (intro ext)
unfolding take_from_mset_def[abs_def]
by (auto simp: pw_eq_iff refine_pw_simps)
lemma [safe_constraint_rules]: "CN_FALSE is_pure A ⟹ is_pure A" by simp
sepref_thm worklist_algo2 is "uncurry0 worklist_algo1" :: "unit_assn⇧k →⇩a bool_assn"
unfolding worklist_algo1_def add_succ1_def
supply [[goals_limit = 1]]
apply (rewrite in "Let ⌑ _" lso_fold_custom_empty)
apply (rewrite in "{#a⇩0#}" lmso_fold_custom_empty)
unfolding take_from_mset_as_mop_mset_pick fold_lso_bex
by sepref
end
concrete_definition worklist_algo2
for Lei a⇩0i Fi succsi
uses Worklist2.worklist_algo2.refine_raw is "(uncurry0 ?f,_)∈_"
thm worklist_algo2_def
context Worklist2 begin
lemma Worklist2_this: "Worklist2 E a⇩0 F (≼) succs A succsi a⇩0i Fi Lei"
by unfold_locales
lemma hnr_F_reachable: "(uncurry0 (worklist_algo2 Lei a⇩0i Fi succsi), uncurry0 (RETURN F_reachable))
∈ unit_assn⇧k →⇩a bool_assn"
using worklist_algo2.refine[OF Worklist2_this,
FCOMP worklist_algo1_ref[THEN nres_relI],
FCOMP worklist_algo_correct[THEN Id_SPEC_refine, THEN nres_relI]]
by (simp add: RETURN_def)
end
context Worklist1 begin
sepref_decl_op F_reachable :: "bool_rel" .
lemma [def_pat_rules]: "F_reachable ≡ op_F_reachable" by simp
lemma hnr_op_F_reachable:
assumes "GEN_ALGO a⇩0i (λa⇩0i. (uncurry0 a⇩0i, uncurry0 (RETURN a⇩0)) ∈ unit_assn⇧k →⇩a A)"
assumes "GEN_ALGO Fi (λFi. (Fi,RETURN o F) ∈ A⇧k →⇩a bool_assn)"
assumes "GEN_ALGO Lei (λLei. (uncurry Lei,uncurry (RETURN oo (≼))) ∈ A⇧k *⇩a A⇧k →⇩a bool_assn)"
assumes "GEN_ALGO succsi (λsuccsi. (succsi,RETURN o succs) ∈ A⇧k →⇩a list_assn A)"
shows "(uncurry0 (worklist_algo2 Lei a⇩0i Fi succsi), uncurry0 (RETURN (PR_CONST op_F_reachable)))
∈ unit_assn⇧k →⇩a bool_assn"
proof -
from assms interpret Worklist2 E a⇩0 F "(≼)" succs A succsi a⇩0i Fi Lei
by (unfold_locales; simp add: GEN_ALGO_def)
from hnr_F_reachable show ?thesis by simp
qed
sepref_decl_impl hnr_op_F_reachable .
end
end
Theory Sepref_Snip_Datatype
section ‹Non-Recursive Algebraic Datatype›
theory Sepref_Snip_Datatype
imports "../../IICF/IICF"
begin
text ‹We define a non-recursive datatype›
datatype 'a enum = E1 'a | E2 'a | E3 | E4 'a 'a | E5 bool 'a
subsection ‹Refinement Assertion›
fun enum_assn where
"enum_assn A (E1 x) (E1 x') = A x x'"
| "enum_assn A (E2 x) (E2 x') = A x x'"
| "enum_assn A (E3) (E3) = emp"
| "enum_assn A (E4 x y) (E4 x' y') = A x x' * A y y'"
| "enum_assn A (E5 x y) (E5 x' y') = bool_assn x x' * A y y'"
| "enum_assn _ _ _ = false"
text ‹You might want to prove some properties›
text ‹A pure-rule is required to enable recovering of invalidated data that was not stored on the heap›
lemma enum_assn_pure[safe_constraint_rules]: "is_pure A ⟹ is_pure (enum_assn A)"
apply (auto simp: is_pure_iff_pure_assn)
apply (rename_tac x x')
apply (case_tac x; case_tac x'; simp add: pure_def)
done
text ‹An identitiy rule is required to easily prove trivial refinement theorems›
lemma enum_assn_id[simp]: "enum_assn id_assn = id_assn"
apply (intro ext)
subgoal for x y by (cases x; cases y; simp add: pure_def)
done
text ‹Structural rules. ›
text ‹Without congruence condition›
lemma enum_match_nocong: "⟦⋀x y. hn_ctxt A x y ⟹⇩t hn_ctxt A' x y⟧ ⟹ hn_ctxt (enum_assn A) e e' ⟹⇩t hn_ctxt (enum_assn A') e e'"
by (cases e; cases e'; simp add: hn_ctxt_def entt_star_mono)
lemma enum_merge_nocong:
assumes "⋀x y. hn_ctxt A x y ∨⇩A hn_ctxt A' x y ⟹⇩A hn_ctxt Am x y"
shows "hn_ctxt (enum_assn A) e e' ∨⇩A hn_ctxt (enum_assn A') e e' ⟹⇩A hn_ctxt (enum_assn Am) e e'"
using assms
by (cases e; cases e'; simp add: hn_ctxt_def ent_disj_star_mono)
text ‹With congruence condition›
lemma enum_match_cong[sepref_frame_match_rules]:
"⟦⋀x y. ⟦x∈set_enum e; y∈set_enum e'⟧ ⟹ hn_ctxt A x y ⟹⇩t hn_ctxt A' x y⟧ ⟹ hn_ctxt (enum_assn A) e e' ⟹⇩t hn_ctxt (enum_assn A') e e'"
by (cases e; cases e'; simp add: hn_ctxt_def entt_star_mono)
lemma enum_merge_cong[sepref_frame_merge_rules]:
assumes "⋀x y. ⟦x∈set_enum e; y∈set_enum e'⟧ ⟹ hn_ctxt A x y ∨⇩A hn_ctxt A' x y ⟹⇩t hn_ctxt Am x y"
shows "hn_ctxt (enum_assn A) e e' ∨⇩A hn_ctxt (enum_assn A') e e' ⟹⇩t hn_ctxt (enum_assn Am) e e'"
apply (blast intro: entt_disjE enum_match_cong entt_disjD1[OF assms] entt_disjD2[OF assms])
done
text ‹Propagating invalid›
lemma entt_invalid_enum: "hn_invalid (enum_assn A) e e' ⟹⇩t hn_ctxt (enum_assn (invalid_assn A)) e e'"
apply (simp add: hn_ctxt_def invalid_assn_def[abs_def])
apply (rule enttI)
apply clarsimp
apply (cases e; cases e'; auto simp: mod_star_conv pure_def)
done
lemmas invalid_enum_merge[sepref_frame_merge_rules] = gen_merge_cons[OF entt_invalid_enum]
subsection ‹Constructors›
text ‹Constructors need to be registered›
sepref_register E1 E2 E3 E4 E5
text ‹Refinement rules can be proven straightforwardly on the separation logic level (method @{method sepref_to_hoare})›
lemma [sepref_fr_rules]: "(return o E1,RETURN o E1) ∈ A⇧d →⇩a enum_assn A"
by sepref_to_hoare sep_auto
lemma [sepref_fr_rules]: "(return o E2,RETURN o E2) ∈ A⇧d →⇩a enum_assn A"
by sepref_to_hoare sep_auto
lemma [sepref_fr_rules]: "(uncurry0 (return E3),uncurry0 (RETURN E3)) ∈ unit_assn⇧k →⇩a enum_assn A"
by sepref_to_hoare sep_auto
lemma [sepref_fr_rules]: "(uncurry (return oo E4),uncurry (RETURN oo E4)) ∈ A⇧d*⇩aA⇧d →⇩a enum_assn A"
by sepref_to_hoare sep_auto
lemma [sepref_fr_rules]: "(uncurry (return oo E5),uncurry (RETURN oo E5)) ∈ bool_assn⇧k*⇩aA⇧d →⇩a enum_assn A"
by sepref_to_hoare (sep_auto simp: pure_def)
subsection ‹Destructor›
text ‹There is currently no automation for destructors, so all the registration boilerplate
needs to be done manually›
text ‹Set ups operation identification heuristics›
sepref_register case_enum
text ‹In the monadify phase, this eta-expands to make visible all required arguments›
lemma [sepref_monadify_arity]: "case_enum ≡ λ⇩2f1 f2 f3 f4 f5 x. SP case_enum$(λ⇩2x. f1$x)$(λ⇩2x. f2$x)$f3$(λ⇩2x y. f4$x$y)$(λ⇩2x y. f5$x$y)$x"
by simp
text ‹This determines an evaluation order for the first-order operands›
lemma [sepref_monadify_comb]: "case_enum$f1$f2$f3$f4$f5$x ≡ (⤜)$(EVAL$x)$(λ⇩2x. SP case_enum$f1$f2$f3$f4$f5$x)" by simp
text ‹This enables translation of the case-distinction in a non-monadic context.›
lemma [sepref_monadify_comb]: "EVAL$(case_enum$(λ⇩2x. f1 x)$(λ⇩2x. f2 x)$f3$(λ⇩2x y. f4 x y)$(λ⇩2x y. f5 x y)$x)
≡ (⤜)$(EVAL$x)$(λ⇩2x. SP case_enum$(λ⇩2x. EVAL $ f1 x)$(λ⇩2x. EVAL $ f2 x)$(EVAL $ f3)$(λ⇩2x y. EVAL $ f4 x y)$(λ⇩2x y. EVAL $ f5 x y)$x)"
apply (rule eq_reflection)
by (simp split: enum.splits)
text ‹Auxiliary lemma, to lift simp-rule over ‹hn_ctxt››
lemma enum_assn_ctxt: "enum_assn A x y = z ⟹ hn_ctxt (enum_assn A) x y = z"
by (simp add: hn_ctxt_def)
text ‹The cases lemma first extracts the refinement for the datatype from the precondition.
Next, it generate proof obligations to refine the functions for every case.
Finally the postconditions of the refinement are merged.
Note that we handle the
destructed values separately, to allow reconstruction of the original datatype after the case-expression.
Moreover, we provide (invalidated) versions of the original compound value to the cases,
which allows access to pure compound values from inside the case.
›
lemma enum_cases_hnr:
fixes A e e'
defines [simp]: "INVe ≡ hn_invalid (enum_assn A) e e'"
assumes FR: "Γ ⟹⇩t hn_ctxt (enum_assn A) e e' * F"
assumes E1: "⋀x1 x1a. ⟦e = E1 x1; e' = E1 x1a⟧ ⟹ hn_refine (hn_ctxt A x1 x1a * INVe * F) (f1' x1a) (hn_ctxt A1' x1 x1a * hn_ctxt XX1 e e' * Γ1') R (f1 x1)"
assumes E2: "⋀x2 x2a. ⟦e = E2 x2; e' = E2 x2a⟧ ⟹ hn_refine (hn_ctxt A x2 x2a * INVe * F) (f2' x2a) (hn_ctxt A2' x2 x2a * hn_ctxt XX2 e e' * Γ2') R (f2 x2)"
assumes E3: "⟦e = E3; e' = E3⟧ ⟹ hn_refine (hn_ctxt (enum_assn A) e e' * F) f3' (hn_ctxt XX3 e e' * Γ3') R f3"
assumes E4: "⋀x41 x42 x41a x42a.
⟦e = E4 x41 x42; e' = E4 x41a x42a⟧
⟹ hn_refine (hn_ctxt A x41 x41a * hn_ctxt A x42 x42a * INVe * F) (f4' x41a x42a) (hn_ctxt A4a' x41 x41a * hn_ctxt A4b' x42 x42a * hn_ctxt XX4 e e' * Γ4') R
(f4 x41 x42)"
assumes E5: "⋀x51 x52 x51a x52a.
⟦e = E5 x51 x52; e' = E5 x51a x52a⟧
⟹ hn_refine (hn_ctxt bool_assn x51 x51a * hn_ctxt A x52 x52a * INVe * F) (f5' x51a x52a)
(hn_ctxt bool_assn x51 x51a * hn_ctxt A5' x52 x52a * hn_ctxt XX5 e e' * Γ5') R (f5 x51 x52)"
assumes MERGE1[unfolded hn_ctxt_def]: "⋀x x'. hn_ctxt A1' x x' ∨⇩A hn_ctxt A2' x x' ∨⇩A hn_ctxt A3' x x' ∨⇩A hn_ctxt A4a' x x' ∨⇩A hn_ctxt A4b' x x' ∨⇩A hn_ctxt A5' x x' ⟹⇩t hn_ctxt A' x x'"
assumes MERGE2[unfolded hn_ctxt_def]: "Γ1' ∨⇩A Γ2' ∨⇩A Γ3' ∨⇩A Γ4' ∨⇩A Γ5' ⟹⇩t Γ'"
shows "hn_refine Γ (case_enum f1' f2' f3' f4' f5' e') (hn_ctxt (enum_assn A') e e' * Γ') R (case_enum$(λ⇩2x. f1 x)$(λ⇩2x. f2 x)$f3$(λ⇩2x y. f4 x y)$(λ⇩2x y. f5 x y)$e)"
apply (rule hn_refine_cons_pre[OF FR])
apply1 extract_hnr_invalids
apply (cases e; cases e'; simp add: enum_assn.simps[THEN enum_assn_ctxt])
subgoal
apply (rule hn_refine_cons[OF _ E1 _ entt_refl]; assumption?)
applyS (simp add: hn_ctxt_def)
apply (rule entt_star_mono)
apply1 (rule entt_fr_drop)
apply1 (rule entt_trans[OF _ MERGE1])
applyS (simp add: hn_ctxt_def entt_disjI1' entt_disjI2')
apply1 (rule entt_trans[OF _ MERGE2])
applyS (simp add: entt_disjI1' entt_disjI2')
done
subgoal
apply (rule hn_refine_cons[OF _ E2 _ entt_refl]; assumption?)
applyS (simp add: hn_ctxt_def)
apply (rule entt_star_mono)
apply1 (rule entt_fr_drop)
apply1 (rule entt_trans[OF _ MERGE1])
applyS (simp add: hn_ctxt_def entt_disjI1' entt_disjI2')
apply1 (rule entt_trans[OF _ MERGE2])
applyS (simp add: entt_disjI1' entt_disjI2')
done
subgoal
apply (rule hn_refine_cons[OF _ E3 _ entt_refl]; assumption?)
applyS (simp add: hn_ctxt_def)
apply (subst mult.commute, rule entt_fr_drop)
apply (rule entt_trans[OF _ MERGE2])
apply (simp add: entt_disjI1' entt_disjI2')
done
subgoal
apply (rule hn_refine_cons[OF _ E4 _ entt_refl]; assumption?)
applyS (simp add: hn_ctxt_def)
apply (rule entt_star_mono)
apply1 (rule entt_fr_drop)
apply (rule entt_star_mono)
apply1 (rule entt_trans[OF _ MERGE1])
applyS (simp add: hn_ctxt_def entt_disjI1' entt_disjI2')
apply1 (rule entt_trans[OF _ MERGE1])
applyS (simp add: hn_ctxt_def entt_disjI1' entt_disjI2')
apply1 (rule entt_trans[OF _ MERGE2])
applyS (simp add: entt_disjI1' entt_disjI2')
done
subgoal
apply (rule hn_refine_cons[OF _ E5 _ entt_refl]; assumption?)
applyS (simp add: hn_ctxt_def)
apply (rule entt_star_mono)
apply1 (rule entt_fr_drop)
apply (rule entt_star_mono)
apply1 (rule ent_imp_entt)
applyS (simp add: hn_ctxt_def)
apply1 (rule entt_trans[OF _ MERGE1])
applyS (simp add: hn_ctxt_def entt_disjI1' entt_disjI2')
apply1 (rule entt_trans[OF _ MERGE2])
applyS (simp add: entt_disjI1' entt_disjI2')
done
done
text ‹After some more preprocessing (adding extra frame-rules for non-atomic postconditions,
and splitting the merge-terms into binary merges), this rule can be registered›
lemmas [sepref_comb_rules] = enum_cases_hnr[sepref_prep_comb_rule]
subsection ‹Regression Test›
definition "test1 (e::bool enum) ≡ RETURN e"
sepref_definition test1_impl is "test1" :: "(enum_assn bool_assn)⇧d →⇩a enum_assn bool_assn"
unfolding test1_def[abs_def] by sepref
sepref_register test1
lemmas [sepref_fr_rules] = test1_impl.refine
definition "test ≡ do {
let x = E1 True;
_ ← case x of
E1 a ⇒ RETURN (Some a)
| _ ⇒ RETURN (Some True);
_ ← test1 x;
_ ← if True then
case x of
E1 a ⇒ RETURN (Some a)
| _ ⇒ RETURN (Some True)
else RETURN None;
_ ← test1 x;
let a = op_array_replicate 4 (3::nat);
let x = E5 False a;
_ ← case x of
E1 _ ⇒ RETURN (0::nat)
| E2 _ ⇒ RETURN 1
| E3 ⇒ RETURN 0
| E4 _ _ ⇒ RETURN 0
| E5 _ a ⇒ mop_list_get a 0;
case x of
E1 a ⇒ do {mop_list_set a 0 0; RETURN (0::nat)}
| E2 _ ⇒ RETURN 1
| E3 ⇒ RETURN 0
| E4 _ _ ⇒ RETURN 0
| E5 _ _ ⇒ RETURN 0
}"
lemmas [safe_constraint_rules] = CN_FALSEI[of is_pure "invalid_assn A" for A]
sepref_definition foo is "uncurry0 test" :: "unit_assn⇧k →⇩a nat_assn"
unfolding test_def
supply [[goals_limit=1]]
by sepref
end
Theory Sepref_Snip_Combinator
section ‹Snippet to Define Custom Combinators›
theory Sepref_Snip_Combinator
imports "../../IICF/IICF"
begin
subsection ‹Definition of the Combinator›
text ‹
Currently, when defining new combinators, you are largely on your own.
If you can show your combinator equivalent to some other, already existing,
combinator, you should apply this equivalence in the monadify phase.
In this example, we show the development of a map combinator from scratch.
›
text ‹We set ourselves in to a context where we fix the abstract and concrete
arguments of the monadic map combinator, as well as the refinement assertions,
and a frame, that represents the remaining heap content, and may be read by the map-function. ›
context
fixes f :: "'a ⇒ 'b nres"
fixes l :: "'a list"
fixes fi :: "'ai ⇒ 'bi Heap"
fixes li :: "'ai list"
fixes A A' :: "'a ⇒ 'ai ⇒ assn"
fixes B :: "'b ⇒ 'bi ⇒ assn"
fixes F :: assn
notes [[sepref_register_adhoc f l]]
assumes f_rl: "hn_refine (hn_ctxt A x xi * F) (fi xi) (hn_ctxt A' x xi * F) B (f$x)"
begin
text ‹We implement our combinator using the monadic refinement framework.›
definition "mmap ≡ RECT (λmmap.
λ[] ⇒ RETURN []
| x#xs ⇒ do { x ← f x; xs ← mmap xs; RETURN (x#xs) }) l"
subsection ‹Synthesis of Implementation›
text ‹In order to propagate the frame ‹F› during synthesis, we use a trick: We wrap the
frame into a dummy refinement assertion. This way, sepref recognizes the frame just as
another context element, and does correct propagation.›
definition "F_assn (x::unit) (y::unit) ≡ F"
lemma F_unf: "hn_ctxt F_assn x y = F"
by (auto simp: F_assn_def hn_ctxt_def)
text ‹We build a combinator rule to refine ‹f›. We need a combinator rule here,
because ‹f› does not only depend on its formal arguments, but also on the frame
(represented as dummy argument). ›
lemma f_rl': "hn_refine (hn_ctxt A x xi * hn_ctxt (F_assn) dx dxi) (fi xi) (hn_ctxt A' x xi * hn_ctxt (F_assn) dx dxi) B (f$x)"
unfolding F_unf by (rule f_rl)
text ‹Then we use the Sepref tool to synthesize an implementation of ‹mmap›.›
schematic_goal mmap_impl:
notes [sepref_comb_rules] = hn_refine_frame[OF f_rl']
shows "hn_refine (hn_ctxt (list_assn A) l li * hn_ctxt (F_assn) dx dxi) (?c::?'c Heap) ?Γ' ?R mmap"
unfolding mmap_def "HOL_list.fold_custom_empty"
apply sepref_dbg_keep
done
text ‹We unfold the wrapped frame›
lemmas mmap_impl' = mmap_impl[unfolded F_unf]
end
subsection ‹Setup for Sepref›
text ‹Outside the context, we extract the synthesized implementation as a new constant, and set up
code theorems for the fixed-point combinators.›
concrete_definition mmap_impl uses mmap_impl'
prepare_code_thms mmap_impl_def
text ‹Moreover, we have to manually declare arity and monadify theorems.
The arity theorem ensures that we always have a constant number of operators, and
the monadify theorem determines an execution order: The list-argument is evaluated first.
›
lemma mmap_arity[sepref_monadify_arity]: "mmap ≡ λ⇩2f l. SP mmap$(λ⇩2x. f$x)$l" by simp
lemma mmap_mcomb[sepref_monadify_comb]: "mmap$f$x ≡ (⤜)$(EVAL$x)$(λ⇩2x. SP mmap$f$x)" by simp
text ‹We can massage the refinement theorem @{thm mmap_impl.refine} a bit, to get a valid
combinator rule›
print_statement hn_refine_cons_pre[OF _ mmap_impl.refine, sepref_prep_comb_rule, no_vars]
lemma mmap_comb_rl[sepref_comb_rules]:
assumes "P ⟹⇩t hn_ctxt (list_assn A) l li * F"
and "⋀x xi. hn_refine (hn_ctxt A x xi * F) (fi xi) (Q x xi) B (f x)"
and "⋀x xi. Q x xi ⟹⇩t hn_ctxt A' x xi * F"
shows "hn_refine P (mmap_impl fi li) (hn_ctxt (list_assn A') l li * F) (list_assn B) (mmap$(λ⇩2x. f x)$l)"
unfolding APP_def PROTECT2_def
using hn_refine_cons_pre[OF _ mmap_impl.refine, sepref_prep_comb_rule, of P A l li F fi Q B f A']
using assms
by simp
subsection ‹Example›
text ‹Finally, we can test our combinator. Note how the
map-function accesses the array on the heap, which is not among its arguments.
This is only possible as we passed around a frame. ›
sepref_thm test_mmap
is "λl. do { let a = op_array_of_list [True,True,False]; mmap (λx. do { mop_list_get a (x mod 3) }) l }"
:: "(list_assn nat_assn)⇧k →⇩a list_assn bool_assn"
unfolding HOL_list.fold_custom_empty
by sepref
subsection ‹Limitations›
text ‹
Currently, the major limitation is that combinator rules are fixed to specific data types.
In our example, we did an implementation for HOL lists. We cannot come up with an alternative implementation,
for, e.g., array-lists, but have to use a different abstract combinator.
One workaround is to use some generic operations, as is done for foreach-loops, which require
a generic to-list operation. However, in this case, we produce unwanted intermediate lists, and
would have to add complicated a-posteriori deforestation optimizations.
›
end
Theory Sepref_Chapter_Benchmarks
chapter ‹Benchmarks›
text ‹Contains the benchmarks of the IRF/IICF. See the README file in the
benchmark folder for more information on how to run the benchmarks.›
theory Sepref_Chapter_Benchmarks
imports Main
begin
end
Theory Heapmap_Bench
theory Heapmap_Bench
imports
"../../../IICF/Impl/Heaps/IICF_Impl_Heapmap"
"../../../Sepref_ICF_Bindings"
begin
definition rrand :: "uint32 ⇒ uint32"
where "rrand s ≡ (s * 1103515245 + 12345) AND 0x7FFFFFFF"
definition rand :: "uint32 ⇒ nat ⇒ (uint32 * nat)" where
"rand s m ≡ let
s = rrand s;
r = nat_of_uint32 s;
r = (r * m) div 0x80000000
in (s,r)"
partial_function (heap) rep where "rep i N f s = (
if i<N then do {
s ← f s i;
rep (i+1) N f s
} else return s
)"
declare rep.simps[code]
term hm_insert_op_impl
definition "testsuite N ≡ do {
let s=0;
let N2=efficient_nat_div2 N;
hm ← hm_empty_op_impl N;
(hm,s) ← rep 0 N (λ(hm,s) i. do {
let (s,v) = rand s N2;
hm ← hm_insert_op_impl N id i v hm;
return (hm,s)
}) (hm,s);
(hm,s) ← rep 0 N (λ(hm,s) i. do {
let (s,v) = rand s N2;
hm ← hm_change_key_op_impl id i v hm;
return (hm,s)
}) (hm,s);
hm ← rep 0 N (λhm i. do {
(_,hm) ← hm_pop_min_op_impl id hm;
return hm
}) hm;
return ()
}"
export_code rep in SML_imp
partial_function (tailrec) drep where "drep i N f s = (
if i<N then drep (i+1) N f (f s i)
else s
)"
declare drep.simps[code]
term aluprioi.insert
term aluprioi.empty
term aluprioi.pop
definition "ftestsuite N ≡ do {
let s=0;
let N2=efficient_nat_div2 N;
let hm= aluprioi.empty ();
let (hm,s) = drep 0 N (λ(hm,s) i. do {
let (s,v) = rand s N2;
let hm = aluprioi.insert hm i v;
(hm,s)
}) (hm,s);
let (hm,s) = drep 0 N (λ(hm,s) i. do {
let (s,v) = rand s N2;
let hm = aluprioi.insert hm i v;
(hm,s)
}) (hm,s);
let hm = drep 0 N (λhm i. do {
let (_,_,hm) = aluprioi.pop hm;
hm
}) hm;
()
}"
export_code
testsuite ftestsuite
nat_of_integer integer_of_nat
in SML_imp module_name Heapmap
file ‹heapmap_export.sml›
end
Theory Dijkstra_Benchmark
theory Dijkstra_Benchmark
imports "../../../Examples/Sepref_Dijkstra"
Dijkstra_Shortest_Path.Test
begin
definition nat_cr_graph_imp
:: "nat ⇒ (nat × nat × nat) list ⇒ nat graph_impl Heap"
where "nat_cr_graph_imp ≡ cr_graph"
concrete_definition nat_dijkstra_imp uses dijkstra_imp_def[where 'W=nat]
prepare_code_thms nat_dijkstra_imp_def
lemma nat_dijkstra_imp_eq: "nat_dijkstra_imp = dijkstra_imp"
unfolding dijkstra_imp_def[abs_def] nat_dijkstra_imp_def[abs_def]
by simp
definition "nat_cr_graph_fun nn es ≡ hlg_from_list_nat ([0..<nn], es)"
export_code
integer_of_nat nat_of_integer
ran_graph
nat_cr_graph_fun nat_dijkstra
nat_cr_graph_imp nat_dijkstra_imp
in SML_imp module_name Dijkstra
file ‹dijkstra_export.sml›
end
Theory NDFS_Benchmark
theory NDFS_Benchmark
imports
Collections_Examples.Nested_DFS
"../../../Examples/Sepref_NDFS"
Separation_Logic_Imperative_HOL.From_List_GA
begin
locale bm_fun begin
schematic_goal succ_of_list_impl:
notes [autoref_tyrel] =
ty_REL[where 'a="nat⇀nat set" and R="⟨nat_rel,R⟩dflt_rm_rel" for R]
ty_REL[where 'a="nat set" and R="⟨nat_rel⟩list_set_rel"]
shows "(?f::?'c,succ_of_list) ∈ ?R"
unfolding succ_of_list_def[abs_def]
apply (autoref (keep_goal))
done
concrete_definition succ_of_list_impl uses succ_of_list_impl
schematic_goal acc_of_list_impl:
notes [autoref_tyrel] =
ty_REL[where 'a="nat set" and R="⟨nat_rel⟩dflt_rs_rel" for R]
shows "(?f::?'c,acc_of_list) ∈ ?R"
unfolding acc_of_list_def[abs_def]
apply (autoref (keep_goal))
done
concrete_definition acc_of_list_impl uses acc_of_list_impl
schematic_goal red_dfs_impl_refine_aux:
fixes u'::"nat" and V'::"nat set"
notes [autoref_tyrel] =
ty_REL[where 'a="nat set" and R="⟨nat_rel⟩dflt_rs_rel"]
assumes [autoref_rules]:
"(u,u')∈nat_rel"
"(V,V')∈⟨nat_rel⟩dflt_rs_rel"
"(onstack,onstack')∈⟨nat_rel⟩dflt_rs_rel"
"(E,E')∈⟨nat_rel⟩slg_rel"
shows "(RETURN (?f::?'c), red_dfs E' onstack' V' u') ∈ ?R"
apply -
unfolding red_dfs_def
apply (autoref_monadic)
done
concrete_definition red_dfs_impl uses red_dfs_impl_refine_aux
prepare_code_thms red_dfs_impl_def
declare red_dfs_impl.refine[autoref_higher_order_rule, autoref_rules]
schematic_goal ndfs_impl_refine_aux:
fixes s::"nat" and succi
notes [autoref_tyrel] =
ty_REL[where 'a="nat set" and R="⟨nat_rel⟩dflt_rs_rel"]
assumes [autoref_rules]:
"(succi,E)∈⟨nat_rel⟩slg_rel"
"(Ai,A)∈⟨nat_rel⟩dflt_rs_rel"
notes [autoref_rules] = IdI[of s]
shows "(RETURN (?f::?'c), blue_dfs E A s) ∈ ⟨?R⟩nres_rel"
unfolding blue_dfs_def
apply (autoref_monadic (trace))
done
concrete_definition fun_ndfs_impl for succi Ai s uses ndfs_impl_refine_aux
prepare_code_thms fun_ndfs_impl_def
definition "fun_succ_of_list ≡
succ_of_list_impl o map (λ(u,v). (nat_of_integer u, nat_of_integer v))"
definition "fun_acc_of_list ≡
acc_of_list_impl o map nat_of_integer"
end
interpretation "fun": bm_fun .
locale bm_funs begin
schematic_goal succ_of_list_impl:
notes [autoref_tyrel] =
ty_REL[where 'a="nat⇀nat set" and R="⟨nat_rel,R⟩iam_map_rel" for R]
ty_REL[where 'a="nat set" and R="⟨nat_rel⟩list_set_rel"]
shows "(?f::?'c,succ_of_list) ∈ ?R"
unfolding succ_of_list_def[abs_def]
apply (autoref (keep_goal))
done
concrete_definition succ_of_list_impl uses succ_of_list_impl
schematic_goal acc_of_list_impl:
notes [autoref_tyrel] =
ty_REL[where 'a="nat set" and R="⟨nat_rel⟩iam_set_rel" for R]
shows "(?f::?'c,acc_of_list) ∈ ?R"
unfolding acc_of_list_def[abs_def]
apply (autoref (keep_goal))
done
concrete_definition acc_of_list_impl uses acc_of_list_impl
schematic_goal red_dfs_impl_refine_aux:
fixes u'::"nat" and V'::"nat set"
notes [autoref_tyrel] =
ty_REL[where 'a="nat set" and R="⟨nat_rel⟩iam_set_rel"]
assumes [autoref_rules]:
"(u,u')∈nat_rel"
"(V,V')∈⟨nat_rel⟩iam_set_rel"
"(onstack,onstack')∈⟨nat_rel⟩iam_set_rel"
"(E,E')∈⟨nat_rel⟩slg_rel"
shows "(RETURN (?f::?'c), red_dfs E' onstack' V' u') ∈ ?R"
apply -
unfolding red_dfs_def
apply (autoref_monadic)
done
concrete_definition red_dfs_impl uses red_dfs_impl_refine_aux
prepare_code_thms red_dfs_impl_def
declare red_dfs_impl.refine[autoref_higher_order_rule, autoref_rules]
schematic_goal ndfs_impl_refine_aux:
fixes s::"nat" and succi
notes [autoref_tyrel] =
ty_REL[where 'a="nat set" and R="⟨nat_rel⟩iam_set_rel"]
assumes [autoref_rules]:
"(succi,E)∈⟨nat_rel⟩slg_rel"
"(Ai,A)∈⟨nat_rel⟩iam_set_rel"
notes [autoref_rules] = IdI[of s]
shows "(RETURN (?f::?'c), blue_dfs E A s) ∈ ⟨?R⟩nres_rel"
unfolding blue_dfs_def
apply (autoref_monadic (trace))
done
concrete_definition funs_ndfs_impl for succi Ai s uses ndfs_impl_refine_aux
prepare_code_thms funs_ndfs_impl_def
definition "funs_succ_of_list ≡
succ_of_list_impl o map (λ(u,v). (nat_of_integer u, nat_of_integer v))"
definition "funs_acc_of_list ≡
acc_of_list_impl o map nat_of_integer"
end
interpretation "funs": bm_funs .
definition "imp_ndfs_impl ≡ blue_dfs_impl"
definition "imp_ndfs_sz_impl ≡ blue_dfs_impl_sz"
definition "imp_acc_of_list l ≡ From_List_GA.ias_from_list (map nat_of_integer l)"
definition "imp_graph_of_list n l ≡ cr_graph (nat_of_integer n) (map (pairself nat_of_integer) l)"
export_code
nat_of_integer integer_of_nat
fun.fun_ndfs_impl fun.fun_succ_of_list fun.fun_acc_of_list
funs.funs_ndfs_impl funs.funs_succ_of_list funs.funs_acc_of_list
imp_ndfs_impl imp_ndfs_sz_impl imp_acc_of_list imp_graph_of_list
in SML_imp module_name NDFS_Benchmark file ‹NDFS_Benchmark_export.sml›
ML_val ‹open Time›
end